force all readers to use an asynchronous writer (#2265)

needed by #2255
Alessandro Ros 2023-08-30 11:24:14 +02:00 committed by GitHub
parent 30a69a7722
commit 5fb7f4e846
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
26 changed files with 962 additions and 938 deletions
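The change that repeats across every reader below: stream.AddReader now receives the asynchronous writer itself instead of the reader, and the per-unit callback returns an error directly, so the manual writer.push wrapper disappears from every call site (queuing presumably moves inside the stream package, which is not part of this diff). A condensed before/after, taken from the H264 branch of hls_muxer.go's createVideoTrack, trimmed and not compilable on its own:

// before: the reader registers itself and queues every write by hand
stream.AddReader(m, videoMedia, videoFormatH264, func(u unit.Unit) {
	m.writer.push(func() error {
		tunit := u.(*unit.H264)
		if tunit.AU == nil {
			return nil
		}
		return m.muxer.WriteH26x(tunit.NTP, tunit.PTS, tunit.AU)
	})
})

// after: the asynchronous writer is passed as the reader and the callback returns an error
stream.AddReader(m.writer, videoMedia, videoFormatH264, func(u unit.Unit) error {
	tunit := u.(*unit.H264)
	if tunit.AU == nil {
		return nil
	}
	return m.muxer.WriteH26x(tunit.NTP, tunit.PTS, tunit.AU)
})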

View file

@ -1,73 +0,0 @@
package core
import (
"fmt"
"time"
"github.com/bluenviron/gortsplib/v4/pkg/ringbuffer"
"github.com/bluenviron/mediamtx/internal/logger"
)
const (
minIntervalBetweenWarnings = 1 * time.Second
)
type asyncWriter struct {
writeErrLogger logger.Writer
buffer *ringbuffer.RingBuffer
// out
err chan error
}
func newAsyncWriter(
queueSize int,
parent logger.Writer,
) *asyncWriter {
buffer, _ := ringbuffer.New(uint64(queueSize))
return &asyncWriter{
writeErrLogger: newLimitedLogger(parent),
buffer: buffer,
err: make(chan error),
}
}
func (w *asyncWriter) start() {
go w.run()
}
func (w *asyncWriter) stop() {
w.buffer.Close()
<-w.err
}
func (w *asyncWriter) error() chan error {
return w.err
}
func (w *asyncWriter) run() {
w.err <- w.runInner()
}
func (w *asyncWriter) runInner() error {
for {
cb, ok := w.buffer.Pull()
if !ok {
return fmt.Errorf("terminated")
}
err := cb.(func() error)()
if err != nil {
return err
}
}
}
func (w *asyncWriter) push(cb func() error) {
ok := w.buffer.Push(cb)
if !ok {
w.writeErrLogger.Log(logger.Warn, "write queue is full")
}
}
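The deleted type reappears, exported, as the internal/asyncwriter package imported by the files below. The new package itself is not shown in this diff, but the call sites (asyncwriter.New, Start, Stop, Error, plus stream.AddReader taking the writer directly) suggest it is essentially this file with exported names; a sketch under that assumption:

package asyncwriter

import (
	"fmt"

	"github.com/bluenviron/gortsplib/v4/pkg/ringbuffer"

	"github.com/bluenviron/mediamtx/internal/logger"
)

// Writer executes queued callbacks in a dedicated goroutine.
type Writer struct {
	writeErrLogger logger.Writer
	buffer         *ringbuffer.RingBuffer
	err            chan error
}

// New allocates a Writer with the given queue size.
func New(queueSize int, parent logger.Writer) *Writer {
	buffer, _ := ringbuffer.New(uint64(queueSize))
	return &Writer{
		writeErrLogger: logger.NewLimitedLogger(parent),
		buffer:         buffer,
		err:            make(chan error),
	}
}

// Start spawns the writing goroutine.
func (w *Writer) Start() {
	go func() {
		w.err <- w.runInner()
	}()
}

// Stop closes the queue and waits for the goroutine to exit.
func (w *Writer) Stop() {
	w.buffer.Close()
	<-w.err
}

// Error returns the channel on which the goroutine reports its exit error.
func (w *Writer) Error() chan error {
	return w.err
}

func (w *Writer) runInner() error {
	for {
		cb, ok := w.buffer.Pull()
		if !ok {
			return fmt.Errorf("terminated")
		}
		if err := cb.(func() error)(); err != nil {
			return err
		}
	}
}

// Push queues a callback, dropping it (with a rate-limited warning) when the
// queue is full; presumably called by the stream package for each reader callback.
func (w *Writer) Push(cb func() error) {
	ok := w.buffer.Push(cb)
	if !ok {
		w.writeErrLogger.Log(logger.Warn, "write queue is full")
	}
}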

View file

@ -142,7 +142,7 @@ func newHLSManager(
// Log is the main logging function.
func (m *hlsManager) Log(level logger.Level, format string, args ...interface{}) {
m.parent.Log(level, "[HLS] "+format, append([]interface{}{}, args...)...)
m.parent.Log(level, "[HLS] "+format, args...)
}
func (m *hlsManager) close() {

View file

@ -17,6 +17,7 @@ import (
"github.com/bluenviron/gortsplib/v4/pkg/format"
"github.com/gin-gonic/gin"
"github.com/bluenviron/mediamtx/internal/asyncwriter"
"github.com/bluenviron/mediamtx/internal/conf"
"github.com/bluenviron/mediamtx/internal/logger"
"github.com/bluenviron/mediamtx/internal/stream"
@ -75,7 +76,7 @@ type hlsMuxer struct {
ctxCancel func()
created time.Time
path *path
writer *asyncWriter
writer *asyncwriter.Writer
lastRequestTime *int64
muxer *gohlslib.Muxer
requests []*hlsMuxerHandleRequestReq
@ -207,7 +208,7 @@ func (m *hlsMuxer) run() {
innerCtxCancel()
if m.remoteAddr == "" { // created with "always remux"
m.Log(logger.Info, "ERR: %v", err)
m.Log(logger.Error, err.Error())
m.clearQueuedRequests()
isReady = false
isRecreating = true
@ -253,7 +254,7 @@ func (m *hlsMuxer) runInner(innerCtx context.Context, innerReady chan struct{})
defer m.path.removeReader(pathRemoveReaderReq{author: m})
m.writer = newAsyncWriter(m.writeQueueSize, m)
m.writer = asyncwriter.New(m.writeQueueSize, m)
var medias []*description.Media
@ -267,7 +268,7 @@ func (m *hlsMuxer) runInner(innerCtx context.Context, innerReady chan struct{})
medias = append(medias, audioMedia)
}
defer res.stream.RemoveReader(m)
defer res.stream.RemoveReader(m.writer)
if medias == nil {
return fmt.Errorf(
@ -303,7 +304,7 @@ func (m *hlsMuxer) runInner(innerCtx context.Context, innerReady chan struct{})
m.Log(logger.Info, "is converting into HLS, %s",
sourceMediaInfo(medias))
m.writer.start()
m.writer.Start()
closeCheckTicker := time.NewTicker(closeCheckPeriod)
defer closeCheckTicker.Stop()
@ -314,16 +315,16 @@ func (m *hlsMuxer) runInner(innerCtx context.Context, innerReady chan struct{})
if m.remoteAddr != "" {
t := time.Unix(0, atomic.LoadInt64(m.lastRequestTime))
if time.Since(t) >= closeAfterInactivity {
m.writer.stop()
m.writer.Stop()
return fmt.Errorf("not used anymore")
}
}
case err := <-m.writer.error():
case err := <-m.writer.Error():
return err
case <-innerCtx.Done():
m.writer.stop()
m.writer.Stop()
return fmt.Errorf("terminated")
}
}
@ -334,22 +335,19 @@ func (m *hlsMuxer) createVideoTrack(stream *stream.Stream) (*description.Media,
videoMedia := stream.Desc().FindFormat(&videoFormatAV1)
if videoFormatAV1 != nil {
stream.AddReader(m, videoMedia, videoFormatAV1, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.AV1)
if tunit.TU == nil {
return nil
}
pts := tunit.PTS
err := m.muxer.WriteAV1(tunit.NTP, pts, tunit.TU)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
stream.AddReader(m.writer, videoMedia, videoFormatAV1, func(u unit.Unit) error {
tunit := u.(*unit.AV1)
if tunit.TU == nil {
return nil
})
}
err := m.muxer.WriteAV1(tunit.NTP, tunit.PTS, tunit.TU)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
return videoMedia, &gohlslib.Track{
@ -361,22 +359,19 @@ func (m *hlsMuxer) createVideoTrack(stream *stream.Stream) (*description.Media,
videoMedia = stream.Desc().FindFormat(&videoFormatVP9)
if videoFormatVP9 != nil {
stream.AddReader(m, videoMedia, videoFormatVP9, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.VP9)
if tunit.Frame == nil {
return nil
}
pts := tunit.PTS
err := m.muxer.WriteVP9(tunit.NTP, pts, tunit.Frame)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
stream.AddReader(m.writer, videoMedia, videoFormatVP9, func(u unit.Unit) error {
tunit := u.(*unit.VP9)
if tunit.Frame == nil {
return nil
})
}
err := m.muxer.WriteVP9(tunit.NTP, tunit.PTS, tunit.Frame)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
return videoMedia, &gohlslib.Track{
@ -388,22 +383,19 @@ func (m *hlsMuxer) createVideoTrack(stream *stream.Stream) (*description.Media,
videoMedia = stream.Desc().FindFormat(&videoFormatH265)
if videoFormatH265 != nil {
stream.AddReader(m, videoMedia, videoFormatH265, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.H265)
if tunit.AU == nil {
return nil
}
pts := tunit.PTS
err := m.muxer.WriteH26x(tunit.NTP, pts, tunit.AU)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
stream.AddReader(m.writer, videoMedia, videoFormatH265, func(u unit.Unit) error {
tunit := u.(*unit.H265)
if tunit.AU == nil {
return nil
})
}
err := m.muxer.WriteH26x(tunit.NTP, tunit.PTS, tunit.AU)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
vps, sps, pps := videoFormatH265.SafeParams()
@ -421,22 +413,19 @@ func (m *hlsMuxer) createVideoTrack(stream *stream.Stream) (*description.Media,
videoMedia = stream.Desc().FindFormat(&videoFormatH264)
if videoFormatH264 != nil {
stream.AddReader(m, videoMedia, videoFormatH264, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.H264)
if tunit.AU == nil {
return nil
}
pts := tunit.PTS
err := m.muxer.WriteH26x(tunit.NTP, pts, tunit.AU)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
stream.AddReader(m.writer, videoMedia, videoFormatH264, func(u unit.Unit) error {
tunit := u.(*unit.H264)
if tunit.AU == nil {
return nil
})
}
err := m.muxer.WriteH26x(tunit.NTP, tunit.PTS, tunit.AU)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
sps, pps := videoFormatH264.SafeParams()
@ -457,21 +446,18 @@ func (m *hlsMuxer) createAudioTrack(stream *stream.Stream) (*description.Media,
audioMedia := stream.Desc().FindFormat(&audioFormatOpus)
if audioMedia != nil {
stream.AddReader(m, audioMedia, audioFormatOpus, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.Opus)
stream.AddReader(m.writer, audioMedia, audioFormatOpus, func(u unit.Unit) error {
tunit := u.(*unit.Opus)
pts := tunit.PTS
err := m.muxer.WriteOpus(
tunit.NTP,
pts,
tunit.Packets)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
err := m.muxer.WriteOpus(
tunit.NTP,
tunit.PTS,
tunit.Packets)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
return nil
})
return audioMedia, &gohlslib.Track{
@ -490,25 +476,22 @@ func (m *hlsMuxer) createAudioTrack(stream *stream.Stream) (*description.Media,
audioMedia = stream.Desc().FindFormat(&audioFormatMPEG4AudioGeneric)
if audioMedia != nil {
stream.AddReader(m, audioMedia, audioFormatMPEG4AudioGeneric, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.MPEG4AudioGeneric)
if tunit.AUs == nil {
return nil
}
pts := tunit.PTS
err := m.muxer.WriteMPEG4Audio(
tunit.NTP,
pts,
tunit.AUs)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
stream.AddReader(m.writer, audioMedia, audioFormatMPEG4AudioGeneric, func(u unit.Unit) error {
tunit := u.(*unit.MPEG4AudioGeneric)
if tunit.AUs == nil {
return nil
})
}
err := m.muxer.WriteMPEG4Audio(
tunit.NTP,
tunit.PTS,
tunit.AUs)
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
return audioMedia, &gohlslib.Track{
@ -525,25 +508,22 @@ func (m *hlsMuxer) createAudioTrack(stream *stream.Stream) (*description.Media,
audioFormatMPEG4AudioLATM.Config != nil &&
len(audioFormatMPEG4AudioLATM.Config.Programs) == 1 &&
len(audioFormatMPEG4AudioLATM.Config.Programs[0].Layers) == 1 {
stream.AddReader(m, audioMedia, audioFormatMPEG4AudioLATM, func(u unit.Unit) {
m.writer.push(func() error {
tunit := u.(*unit.MPEG4AudioLATM)
if tunit.AU == nil {
return nil
}
pts := tunit.PTS
err := m.muxer.WriteMPEG4Audio(
tunit.NTP,
pts,
[][]byte{tunit.AU})
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
stream.AddReader(m.writer, audioMedia, audioFormatMPEG4AudioLATM, func(u unit.Unit) error {
tunit := u.(*unit.MPEG4AudioLATM)
if tunit.AU == nil {
return nil
})
}
err := m.muxer.WriteMPEG4Audio(
tunit.NTP,
tunit.PTS,
[][]byte{tunit.AU})
if err != nil {
return fmt.Errorf("muxer error: %v", err)
}
return nil
})
return audioMedia, &gohlslib.Track{

View file

@ -48,7 +48,7 @@ func (s *hlsSource) run(ctx context.Context, cnf *conf.PathConf, reloadConf chan
}
}()
decodeErrLogger := newLimitedLogger(s)
decodeErrLogger := logger.NewLimitedLogger(s)
var c *gohlslib.Client
c = &gohlslib.Client{

View file

@ -1,30 +0,0 @@
package core
import (
"sync"
"time"
"github.com/bluenviron/mediamtx/internal/logger"
)
type limitedLogger struct {
w logger.Writer
mutex sync.Mutex
lastPrinted time.Time
}
func newLimitedLogger(w logger.Writer) *limitedLogger {
return &limitedLogger{
w: w,
}
}
func (l *limitedLogger) Log(level logger.Level, format string, args ...interface{}) {
now := time.Now()
l.mutex.Lock()
if now.Sub(l.lastPrinted) >= minIntervalBetweenWarnings {
l.lastPrinted = now
l.w.Log(level, format, args...)
}
l.mutex.Unlock()
}
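This helper makes a similar move: the hunks below call logger.NewLimitedLogger in its place. Its implementation is not shown here either; presumably it keeps this shape inside the logger package, now carrying its own interval constant since minIntervalBetweenWarnings was deleted together with async_writer.go. A sketch under those assumptions:

package logger

import (
	"sync"
	"time"
)

// assumed equivalent of core's deleted minIntervalBetweenWarnings
const limitedLoggerInterval = 1 * time.Second

type limitedLogger struct {
	w           Writer
	mutex       sync.Mutex
	lastPrinted time.Time
}

// NewLimitedLogger returns a Writer that forwards at most one message per
// interval and silently drops the rest.
func NewLimitedLogger(w Writer) Writer {
	return &limitedLogger{w: w}
}

func (l *limitedLogger) Log(level Level, format string, args ...interface{}) {
	now := time.Now()
	l.mutex.Lock()
	if now.Sub(l.lastPrinted) >= limitedLoggerInterval {
		l.lastPrinted = now
		l.w.Log(level, format, args...)
	}
	l.mutex.Unlock()
}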

View file

@ -380,112 +380,85 @@ func (pa *path) runInner() error {
for {
select {
case <-pa.onDemandStaticSourceReadyTimer.C:
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
req.res <- pathAddReaderRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.readerAddRequestsOnHold = nil
pa.onDemandStaticSourceStop()
pa.doOnDemandStaticSourceReadyTimer()
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case <-pa.onDemandStaticSourceCloseTimer.C:
pa.setNotReady()
pa.onDemandStaticSourceStop()
pa.doOnDemandStaticSourceCloseTimer()
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case <-pa.onDemandPublisherReadyTimer.C:
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
req.res <- pathAddReaderRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.readerAddRequestsOnHold = nil
pa.onDemandStopPublisher()
pa.doOnDemandPublisherReadyTimer()
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case <-pa.onDemandPublisherCloseTimer.C:
pa.onDemandStopPublisher()
pa.doOnDemandPublisherCloseTimer()
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case newConf := <-pa.chReloadConf:
if pa.conf.HasStaticSource() {
go pa.source.(*sourceStatic).reloadConf(newConf)
}
pa.confMutex.Lock()
pa.conf = newConf
pa.confMutex.Unlock()
pa.doReloadConf(newConf)
case req := <-pa.chSourceStaticSetReady:
pa.handleSourceStaticSetReady(req)
pa.doSourceStaticSetReady(req)
case req := <-pa.chSourceStaticSetNotReady:
pa.handleSourceStaticSetNotReady(req)
pa.doSourceStaticSetNotReady(req)
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case req := <-pa.chDescribe:
pa.handleDescribe(req)
pa.doDescribe(req)
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case req := <-pa.chRemovePublisher:
pa.handleRemovePublisher(req)
pa.doRemovePublisher(req)
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case req := <-pa.chAddPublisher:
pa.handleAddPublisher(req)
pa.doAddPublisher(req)
case req := <-pa.chStartPublisher:
pa.handleStartPublisher(req)
pa.doStartPublisher(req)
case req := <-pa.chStopPublisher:
pa.handleStopPublisher(req)
pa.doStopPublisher(req)
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case req := <-pa.chAddReader:
pa.handleAddReader(req)
pa.doAddReader(req)
if pa.shouldClose() {
return fmt.Errorf("not in use")
}
case req := <-pa.chRemoveReader:
pa.handleRemoveReader(req)
pa.doRemoveReader(req)
case req := <-pa.chAPIPathsGet:
pa.handleAPIPathsGet(req)
pa.doAPIPathsGet(req)
case <-pa.ctx.Done():
return fmt.Errorf("terminated")
@ -493,6 +466,304 @@ func (pa *path) runInner() error {
}
}
func (pa *path) doOnDemandStaticSourceReadyTimer() {
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
req.res <- pathAddReaderRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.readerAddRequestsOnHold = nil
pa.onDemandStaticSourceStop()
}
func (pa *path) doOnDemandStaticSourceCloseTimer() {
pa.setNotReady()
pa.onDemandStaticSourceStop()
}
func (pa *path) doOnDemandPublisherReadyTimer() {
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
req.res <- pathAddReaderRes{err: fmt.Errorf("source of path '%s' has timed out", pa.name)}
}
pa.readerAddRequestsOnHold = nil
pa.onDemandStopPublisher()
}
func (pa *path) doOnDemandPublisherCloseTimer() {
pa.onDemandStopPublisher()
}
func (pa *path) doReloadConf(newConf *conf.PathConf) {
if pa.conf.HasStaticSource() {
go pa.source.(*sourceStatic).reloadConf(newConf)
}
pa.confMutex.Lock()
pa.conf = newConf
pa.confMutex.Unlock()
}
func (pa *path) doSourceStaticSetReady(req pathSourceStaticSetReadyReq) {
err := pa.setReady(req.desc, req.generateRTPPackets)
if err != nil {
req.res <- pathSourceStaticSetReadyRes{err: err}
return
}
if pa.conf.HasOnDemandStaticSource() {
pa.onDemandStaticSourceReadyTimer.Stop()
pa.onDemandStaticSourceReadyTimer = newEmptyTimer()
pa.onDemandStaticSourceScheduleClose()
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{
stream: pa.stream,
}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
pa.addReaderPost(req)
}
pa.readerAddRequestsOnHold = nil
}
req.res <- pathSourceStaticSetReadyRes{stream: pa.stream}
}
func (pa *path) doSourceStaticSetNotReady(req pathSourceStaticSetNotReadyReq) {
pa.setNotReady()
// send response before calling onDemandStaticSourceStop()
// in order to avoid a deadlock due to sourceStatic.stop()
close(req.res)
if pa.conf.HasOnDemandStaticSource() && pa.onDemandStaticSourceState != pathOnDemandStateInitial {
pa.onDemandStaticSourceStop()
}
}
func (pa *path) doDescribe(req pathDescribeReq) {
if _, ok := pa.source.(*sourceRedirect); ok {
req.res <- pathDescribeRes{
redirect: pa.conf.SourceRedirect,
}
return
}
if pa.stream != nil {
req.res <- pathDescribeRes{
stream: pa.stream,
}
return
}
if pa.conf.HasOnDemandStaticSource() {
if pa.onDemandStaticSourceState == pathOnDemandStateInitial {
pa.onDemandStaticSourceStart()
}
pa.describeRequestsOnHold = append(pa.describeRequestsOnHold, req)
return
}
if pa.conf.HasOnDemandPublisher() {
if pa.onDemandPublisherState == pathOnDemandStateInitial {
pa.onDemandStartPublisher()
}
pa.describeRequestsOnHold = append(pa.describeRequestsOnHold, req)
return
}
if pa.conf.Fallback != "" {
fallbackURL := func() string {
if strings.HasPrefix(pa.conf.Fallback, "/") {
ur := url.URL{
Scheme: req.url.Scheme,
User: req.url.User,
Host: req.url.Host,
Path: pa.conf.Fallback,
}
return ur.String()
}
return pa.conf.Fallback
}()
req.res <- pathDescribeRes{redirect: fallbackURL}
return
}
req.res <- pathDescribeRes{err: errPathNoOnePublishing{pathName: pa.name}}
}
func (pa *path) doRemovePublisher(req pathRemovePublisherReq) {
if pa.source == req.author {
pa.executeRemovePublisher()
}
close(req.res)
}
func (pa *path) doAddPublisher(req pathAddPublisherReq) {
if pa.conf.Source != "publisher" {
req.res <- pathAddPublisherRes{
err: fmt.Errorf("can't publish to path '%s' since 'source' is not 'publisher'", pa.name),
}
return
}
if pa.source != nil {
if !pa.conf.OverridePublisher {
req.res <- pathAddPublisherRes{err: fmt.Errorf("someone is already publishing to path '%s'", pa.name)}
return
}
pa.Log(logger.Info, "closing existing publisher")
pa.source.(publisher).close()
pa.executeRemovePublisher()
}
pa.source = req.author
req.res <- pathAddPublisherRes{path: pa}
}
func (pa *path) doStartPublisher(req pathStartPublisherReq) {
if pa.source != req.author {
req.res <- pathStartPublisherRes{err: fmt.Errorf("publisher is not assigned to this path anymore")}
return
}
err := pa.setReady(req.desc, req.generateRTPPackets)
if err != nil {
req.res <- pathStartPublisherRes{err: err}
return
}
req.author.Log(logger.Info, "is publishing to path '%s', %s",
pa.name,
sourceMediaInfo(req.desc.Medias))
if pa.conf.HasOnDemandPublisher() {
pa.onDemandPublisherReadyTimer.Stop()
pa.onDemandPublisherReadyTimer = newEmptyTimer()
pa.onDemandPublisherScheduleClose()
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{
stream: pa.stream,
}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
pa.addReaderPost(req)
}
pa.readerAddRequestsOnHold = nil
}
req.res <- pathStartPublisherRes{stream: pa.stream}
}
func (pa *path) doStopPublisher(req pathStopPublisherReq) {
if req.author == pa.source && pa.stream != nil {
pa.setNotReady()
}
close(req.res)
}
func (pa *path) doAddReader(req pathAddReaderReq) {
if pa.stream != nil {
pa.addReaderPost(req)
return
}
if pa.conf.HasOnDemandStaticSource() {
if pa.onDemandStaticSourceState == pathOnDemandStateInitial {
pa.onDemandStaticSourceStart()
}
pa.readerAddRequestsOnHold = append(pa.readerAddRequestsOnHold, req)
return
}
if pa.conf.HasOnDemandPublisher() {
if pa.onDemandPublisherState == pathOnDemandStateInitial {
pa.onDemandStartPublisher()
}
pa.readerAddRequestsOnHold = append(pa.readerAddRequestsOnHold, req)
return
}
req.res <- pathAddReaderRes{err: errPathNoOnePublishing{pathName: pa.name}}
}
func (pa *path) doRemoveReader(req pathRemoveReaderReq) {
if _, ok := pa.readers[req.author]; ok {
pa.executeRemoveReader(req.author)
}
close(req.res)
if len(pa.readers) == 0 {
if pa.conf.HasOnDemandStaticSource() {
if pa.onDemandStaticSourceState == pathOnDemandStateReady {
pa.onDemandStaticSourceScheduleClose()
}
} else if pa.conf.HasOnDemandPublisher() {
if pa.onDemandPublisherState == pathOnDemandStateReady {
pa.onDemandPublisherScheduleClose()
}
}
}
}
func (pa *path) doAPIPathsGet(req pathAPIPathsGetReq) {
req.res <- pathAPIPathsGetRes{
data: &apiPath{
Name: pa.name,
ConfName: pa.confName,
Conf: pa.conf,
Source: func() interface{} {
if pa.source == nil {
return nil
}
return pa.source.apiSourceDescribe()
}(),
SourceReady: pa.stream != nil,
Ready: pa.stream != nil,
ReadyTime: func() *time.Time {
if pa.stream == nil {
return nil
}
v := pa.readyTime
return &v
}(),
Tracks: func() []string {
if pa.stream == nil {
return []string{}
}
return mediasDescription(pa.stream.Desc().Medias)
}(),
BytesReceived: atomic.LoadUint64(pa.bytesReceived),
Readers: func() []interface{} {
ret := []interface{}{}
for r := range pa.readers {
ret = append(ret, r.apiReaderDescribe())
}
return ret
}(),
},
}
}
func (pa *path) shouldClose() bool {
return pa.conf.Regexp != nil &&
pa.source == nil &&
@ -572,7 +843,7 @@ func (pa *path) onDemandPublisherScheduleClose() {
func (pa *path) onDemandStopPublisher() {
if pa.source != nil {
pa.source.(publisher).close()
pa.doPublisherRemove()
pa.executeRemovePublisher()
}
if pa.onDemandPublisherState == pathOnDemandStateClosing {
@ -590,18 +861,18 @@ func (pa *path) onDemandStopPublisher() {
}
func (pa *path) setReady(desc *description.Session, allocateEncoder bool) error {
stream, err := stream.New(
var err error
pa.stream, err = stream.New(
pa.udpMaxPayloadSize,
desc,
allocateEncoder,
pa.bytesReceived,
newLimitedLogger(pa.source),
logger.NewLimitedLogger(pa.source),
)
if err != nil {
return err
}
pa.stream = stream
pa.readyTime = time.Now()
if pa.conf.RunOnReady != "" {
@ -625,7 +896,7 @@ func (pa *path) setNotReady() {
pa.parent.pathNotReady(pa)
for r := range pa.readers {
pa.doRemoveReader(r)
pa.executeRemoveReader(r)
r.close()
}
@ -641,11 +912,11 @@ func (pa *path) setNotReady() {
}
}
func (pa *path) doRemoveReader(r reader) {
func (pa *path) executeRemoveReader(r reader) {
delete(pa.readers, r)
}
func (pa *path) doPublisherRemove() {
func (pa *path) executeRemovePublisher() {
if pa.stream != nil {
pa.setNotReady()
}
@ -653,219 +924,7 @@ func (pa *path) doPublisherRemove() {
pa.source = nil
}
func (pa *path) handleSourceStaticSetReady(req pathSourceStaticSetReadyReq) {
err := pa.setReady(req.desc, req.generateRTPPackets)
if err != nil {
req.res <- pathSourceStaticSetReadyRes{err: err}
return
}
if pa.conf.HasOnDemandStaticSource() {
pa.onDemandStaticSourceReadyTimer.Stop()
pa.onDemandStaticSourceReadyTimer = newEmptyTimer()
pa.onDemandStaticSourceScheduleClose()
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{
stream: pa.stream,
}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
pa.handleAddReaderPost(req)
}
pa.readerAddRequestsOnHold = nil
}
req.res <- pathSourceStaticSetReadyRes{stream: pa.stream}
}
func (pa *path) handleSourceStaticSetNotReady(req pathSourceStaticSetNotReadyReq) {
pa.setNotReady()
// send response before calling onDemandStaticSourceStop()
// in order to avoid a deadlock due to sourceStatic.stop()
close(req.res)
if pa.conf.HasOnDemandStaticSource() && pa.onDemandStaticSourceState != pathOnDemandStateInitial {
pa.onDemandStaticSourceStop()
}
}
func (pa *path) handleDescribe(req pathDescribeReq) {
if _, ok := pa.source.(*sourceRedirect); ok {
req.res <- pathDescribeRes{
redirect: pa.conf.SourceRedirect,
}
return
}
if pa.stream != nil {
req.res <- pathDescribeRes{
stream: pa.stream,
}
return
}
if pa.conf.HasOnDemandStaticSource() {
if pa.onDemandStaticSourceState == pathOnDemandStateInitial {
pa.onDemandStaticSourceStart()
}
pa.describeRequestsOnHold = append(pa.describeRequestsOnHold, req)
return
}
if pa.conf.HasOnDemandPublisher() {
if pa.onDemandPublisherState == pathOnDemandStateInitial {
pa.onDemandStartPublisher()
}
pa.describeRequestsOnHold = append(pa.describeRequestsOnHold, req)
return
}
if pa.conf.Fallback != "" {
fallbackURL := func() string {
if strings.HasPrefix(pa.conf.Fallback, "/") {
ur := url.URL{
Scheme: req.url.Scheme,
User: req.url.User,
Host: req.url.Host,
Path: pa.conf.Fallback,
}
return ur.String()
}
return pa.conf.Fallback
}()
req.res <- pathDescribeRes{redirect: fallbackURL}
return
}
req.res <- pathDescribeRes{err: errPathNoOnePublishing{pathName: pa.name}}
}
func (pa *path) handleRemovePublisher(req pathRemovePublisherReq) {
if pa.source == req.author {
pa.doPublisherRemove()
}
close(req.res)
}
func (pa *path) handleAddPublisher(req pathAddPublisherReq) {
if pa.conf.Source != "publisher" {
req.res <- pathAddPublisherRes{
err: fmt.Errorf("can't publish to path '%s' since 'source' is not 'publisher'", pa.name),
}
return
}
if pa.source != nil {
if !pa.conf.OverridePublisher {
req.res <- pathAddPublisherRes{err: fmt.Errorf("someone is already publishing to path '%s'", pa.name)}
return
}
pa.Log(logger.Info, "closing existing publisher")
pa.source.(publisher).close()
pa.doPublisherRemove()
}
pa.source = req.author
req.res <- pathAddPublisherRes{path: pa}
}
func (pa *path) handleStartPublisher(req pathStartPublisherReq) {
if pa.source != req.author {
req.res <- pathStartPublisherRes{err: fmt.Errorf("publisher is not assigned to this path anymore")}
return
}
err := pa.setReady(req.desc, req.generateRTPPackets)
if err != nil {
req.res <- pathStartPublisherRes{err: err}
return
}
req.author.Log(logger.Info, "is publishing to path '%s', %s",
pa.name,
sourceMediaInfo(req.desc.Medias))
if pa.conf.HasOnDemandPublisher() {
pa.onDemandPublisherReadyTimer.Stop()
pa.onDemandPublisherReadyTimer = newEmptyTimer()
pa.onDemandPublisherScheduleClose()
for _, req := range pa.describeRequestsOnHold {
req.res <- pathDescribeRes{
stream: pa.stream,
}
}
pa.describeRequestsOnHold = nil
for _, req := range pa.readerAddRequestsOnHold {
pa.handleAddReaderPost(req)
}
pa.readerAddRequestsOnHold = nil
}
req.res <- pathStartPublisherRes{stream: pa.stream}
}
func (pa *path) handleStopPublisher(req pathStopPublisherReq) {
if req.author == pa.source && pa.stream != nil {
pa.setNotReady()
}
close(req.res)
}
func (pa *path) handleRemoveReader(req pathRemoveReaderReq) {
if _, ok := pa.readers[req.author]; ok {
pa.doRemoveReader(req.author)
}
close(req.res)
if len(pa.readers) == 0 {
if pa.conf.HasOnDemandStaticSource() {
if pa.onDemandStaticSourceState == pathOnDemandStateReady {
pa.onDemandStaticSourceScheduleClose()
}
} else if pa.conf.HasOnDemandPublisher() {
if pa.onDemandPublisherState == pathOnDemandStateReady {
pa.onDemandPublisherScheduleClose()
}
}
}
}
func (pa *path) handleAddReader(req pathAddReaderReq) {
if pa.stream != nil {
pa.handleAddReaderPost(req)
return
}
if pa.conf.HasOnDemandStaticSource() {
if pa.onDemandStaticSourceState == pathOnDemandStateInitial {
pa.onDemandStaticSourceStart()
}
pa.readerAddRequestsOnHold = append(pa.readerAddRequestsOnHold, req)
return
}
if pa.conf.HasOnDemandPublisher() {
if pa.onDemandPublisherState == pathOnDemandStateInitial {
pa.onDemandStartPublisher()
}
pa.readerAddRequestsOnHold = append(pa.readerAddRequestsOnHold, req)
return
}
req.res <- pathAddReaderRes{err: errPathNoOnePublishing{pathName: pa.name}}
}
func (pa *path) handleAddReaderPost(req pathAddReaderReq) {
func (pa *path) addReaderPost(req pathAddReaderReq) {
if _, ok := pa.readers[req.author]; ok {
req.res <- pathAddReaderRes{
path: pa,
@ -903,45 +962,6 @@ func (pa *path) handleAddReaderPost(req pathAddReaderReq) {
}
}
func (pa *path) handleAPIPathsGet(req pathAPIPathsGetReq) {
req.res <- pathAPIPathsGetRes{
data: &apiPath{
Name: pa.name,
ConfName: pa.confName,
Conf: pa.conf,
Source: func() interface{} {
if pa.source == nil {
return nil
}
return pa.source.apiSourceDescribe()
}(),
SourceReady: pa.stream != nil,
Ready: pa.stream != nil,
ReadyTime: func() *time.Time {
if pa.stream == nil {
return nil
}
v := pa.readyTime
return &v
}(),
Tracks: func() []string {
if pa.stream == nil {
return []string{}
}
return mediasDescription(pa.stream.Desc().Medias)
}(),
BytesReceived: atomic.LoadUint64(pa.bytesReceived),
Readers: func() []interface{} {
ret := []interface{}{}
for r := range pa.readers {
ret = append(ret, r.apiReaderDescribe())
}
return ret
}(),
},
}
}
// reloadConf is called by pathManager.
func (pa *path) reloadConf(newConf *conf.PathConf) {
select {

View file

@ -85,6 +85,7 @@ type pathManager struct {
// in
chReloadConf chan map[string]*conf.PathConf
chSetHLSManager chan pathManagerHLSManager
chClosePath chan *path
chPathReady chan *path
chPathNotReady chan *path
@ -92,7 +93,6 @@ type pathManager struct {
chDescribe chan pathDescribeReq
chAddReader chan pathAddReaderReq
chAddPublisher chan pathAddPublisherReq
chSetHLSManager chan pathManagerHLSManager
chAPIPathsList chan pathAPIPathsListReq
chAPIPathsGet chan pathAPIPathsGetReq
}
@ -129,6 +129,7 @@ func newPathManager(
paths: make(map[string]*path),
pathsByConf: make(map[string]map[*path]struct{}),
chReloadConf: make(chan map[string]*conf.PathConf),
chSetHLSManager: make(chan pathManagerHLSManager),
chClosePath: make(chan *path),
chPathReady: make(chan *path),
chPathNotReady: make(chan *path),
@ -136,7 +137,6 @@ func newPathManager(
chDescribe: make(chan pathDescribeReq),
chAddReader: make(chan pathAddReaderReq),
chAddPublisher: make(chan pathAddPublisherReq),
chSetHLSManager: make(chan pathManagerHLSManager),
chAPIPathsList: make(chan pathAPIPathsListReq),
chAPIPathsGet: make(chan pathAPIPathsGetReq),
}
@ -177,157 +177,37 @@ outer:
for {
select {
case newPathConfs := <-pm.chReloadConf:
for confName, pathConf := range pm.pathConfs {
if newPathConf, ok := newPathConfs[confName]; ok {
// configuration has changed
if !newPathConf.Equal(pathConf) {
if pathConfCanBeUpdated(pathConf, newPathConf) { // paths associated with the configuration can be updated
for pa := range pm.pathsByConf[confName] {
go pa.reloadConf(newPathConf)
}
} else { // paths associated with the configuration must be recreated
for pa := range pm.pathsByConf[confName] {
pm.removePath(pa)
pa.close()
pa.wait() // avoid conflicts between sources
}
}
}
} else {
// configuration has been deleted, remove associated paths
for pa := range pm.pathsByConf[confName] {
pm.removePath(pa)
pa.close()
pa.wait() // avoid conflicts between sources
}
}
}
pm.doReloadConf(newPathConfs)
pm.pathConfs = newPathConfs
// add new paths
for pathConfName, pathConf := range pm.pathConfs {
if _, ok := pm.paths[pathConfName]; !ok && pathConf.Regexp == nil {
pm.createPath(pathConfName, pathConf, pathConfName, nil)
}
}
case m := <-pm.chSetHLSManager:
pm.doSetHLSManager(m)
case pa := <-pm.chClosePath:
if pmpa, ok := pm.paths[pa.name]; !ok || pmpa != pa {
continue
}
pm.removePath(pa)
pm.doClosePath(pa)
case pa := <-pm.chPathReady:
if pm.hlsManager != nil {
pm.hlsManager.pathReady(pa)
}
pm.doPathReady(pa)
case pa := <-pm.chPathNotReady:
if pm.hlsManager != nil {
pm.hlsManager.pathNotReady(pa)
}
pm.doPathNotReady(pa)
case req := <-pm.chGetConfForPath:
_, pathConf, _, err := getConfForPath(pm.pathConfs, req.name)
if err != nil {
req.res <- pathGetConfForPathRes{err: err}
continue
}
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods,
req.name, pathConf, req.publish, req.credentials)
if err != nil {
req.res <- pathGetConfForPathRes{err: err}
continue
}
req.res <- pathGetConfForPathRes{conf: pathConf}
pm.doGetConfForPath(req)
case req := <-pm.chDescribe:
pathConfName, pathConf, pathMatches, err := getConfForPath(pm.pathConfs, req.pathName)
if err != nil {
req.res <- pathDescribeRes{err: err}
continue
}
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods, req.pathName, pathConf, false, req.credentials)
if err != nil {
req.res <- pathDescribeRes{err: err}
continue
}
// create path if it doesn't exist
if _, ok := pm.paths[req.pathName]; !ok {
pm.createPath(pathConfName, pathConf, req.pathName, pathMatches)
}
req.res <- pathDescribeRes{path: pm.paths[req.pathName]}
pm.doDescribe(req)
case req := <-pm.chAddReader:
pathConfName, pathConf, pathMatches, err := getConfForPath(pm.pathConfs, req.pathName)
if err != nil {
req.res <- pathAddReaderRes{err: err}
continue
}
if !req.skipAuth {
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods, req.pathName, pathConf, false, req.credentials)
if err != nil {
req.res <- pathAddReaderRes{err: err}
continue
}
}
// create path if it doesn't exist
if _, ok := pm.paths[req.pathName]; !ok {
pm.createPath(pathConfName, pathConf, req.pathName, pathMatches)
}
req.res <- pathAddReaderRes{path: pm.paths[req.pathName]}
pm.doAddReader(req)
case req := <-pm.chAddPublisher:
pathConfName, pathConf, pathMatches, err := getConfForPath(pm.pathConfs, req.pathName)
if err != nil {
req.res <- pathAddPublisherRes{err: err}
continue
}
if !req.skipAuth {
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods, req.pathName, pathConf, true, req.credentials)
if err != nil {
req.res <- pathAddPublisherRes{err: err}
continue
}
}
// create path if it doesn't exist
if _, ok := pm.paths[req.pathName]; !ok {
pm.createPath(pathConfName, pathConf, req.pathName, pathMatches)
}
req.res <- pathAddPublisherRes{path: pm.paths[req.pathName]}
case s := <-pm.chSetHLSManager:
pm.hlsManager = s
pm.doAddPublisher(req)
case req := <-pm.chAPIPathsList:
paths := make(map[string]*path)
for name, pa := range pm.paths {
paths[name] = pa
}
req.res <- pathAPIPathsListRes{paths: paths}
pm.doAPIPathsList(req)
case req := <-pm.chAPIPathsGet:
path, ok := pm.paths[req.name]
if !ok {
req.res <- pathAPIPathsGetRes{err: errAPINotFound}
continue
}
req.res <- pathAPIPathsGetRes{path: path}
pm.doAPIPathsGet(req)
case <-pm.ctx.Done():
break outer
@ -341,6 +221,170 @@ outer:
}
}
func (pm *pathManager) doReloadConf(newPathConfs map[string]*conf.PathConf) {
for confName, pathConf := range pm.pathConfs {
if newPathConf, ok := newPathConfs[confName]; ok {
// configuration has changed
if !newPathConf.Equal(pathConf) {
if pathConfCanBeUpdated(pathConf, newPathConf) { // paths associated with the configuration can be updated
for pa := range pm.pathsByConf[confName] {
go pa.reloadConf(newPathConf)
}
} else { // paths associated with the configuration must be recreated
for pa := range pm.pathsByConf[confName] {
pm.removePath(pa)
pa.close()
pa.wait() // avoid conflicts between sources
}
}
}
} else {
// configuration has been deleted, remove associated paths
for pa := range pm.pathsByConf[confName] {
pm.removePath(pa)
pa.close()
pa.wait() // avoid conflicts between sources
}
}
}
pm.pathConfs = newPathConfs
// add new paths
for pathConfName, pathConf := range pm.pathConfs {
if _, ok := pm.paths[pathConfName]; !ok && pathConf.Regexp == nil {
pm.createPath(pathConfName, pathConf, pathConfName, nil)
}
}
}
func (pm *pathManager) doSetHLSManager(m pathManagerHLSManager) {
pm.hlsManager = m
}
func (pm *pathManager) doClosePath(pa *path) {
if pmpa, ok := pm.paths[pa.name]; !ok || pmpa != pa {
return
}
pm.removePath(pa)
}
func (pm *pathManager) doPathReady(pa *path) {
if pm.hlsManager != nil {
pm.hlsManager.pathReady(pa)
}
}
func (pm *pathManager) doPathNotReady(pa *path) {
if pm.hlsManager != nil {
pm.hlsManager.pathNotReady(pa)
}
}
func (pm *pathManager) doGetConfForPath(req pathGetConfForPathReq) {
_, pathConf, _, err := getConfForPath(pm.pathConfs, req.name)
if err != nil {
req.res <- pathGetConfForPathRes{err: err}
return
}
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods,
req.name, pathConf, req.publish, req.credentials)
if err != nil {
req.res <- pathGetConfForPathRes{err: err}
return
}
req.res <- pathGetConfForPathRes{conf: pathConf}
}
func (pm *pathManager) doDescribe(req pathDescribeReq) {
pathConfName, pathConf, pathMatches, err := getConfForPath(pm.pathConfs, req.pathName)
if err != nil {
req.res <- pathDescribeRes{err: err}
return
}
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods, req.pathName, pathConf, false, req.credentials)
if err != nil {
req.res <- pathDescribeRes{err: err}
return
}
// create path if it doesn't exist
if _, ok := pm.paths[req.pathName]; !ok {
pm.createPath(pathConfName, pathConf, req.pathName, pathMatches)
}
req.res <- pathDescribeRes{path: pm.paths[req.pathName]}
}
func (pm *pathManager) doAddReader(req pathAddReaderReq) {
pathConfName, pathConf, pathMatches, err := getConfForPath(pm.pathConfs, req.pathName)
if err != nil {
req.res <- pathAddReaderRes{err: err}
return
}
if !req.skipAuth {
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods, req.pathName, pathConf, false, req.credentials)
if err != nil {
req.res <- pathAddReaderRes{err: err}
return
}
}
// create path if it doesn't exist
if _, ok := pm.paths[req.pathName]; !ok {
pm.createPath(pathConfName, pathConf, req.pathName, pathMatches)
}
req.res <- pathAddReaderRes{path: pm.paths[req.pathName]}
}
func (pm *pathManager) doAddPublisher(req pathAddPublisherReq) {
pathConfName, pathConf, pathMatches, err := getConfForPath(pm.pathConfs, req.pathName)
if err != nil {
req.res <- pathAddPublisherRes{err: err}
return
}
if !req.skipAuth {
err = doAuthentication(pm.externalAuthenticationURL, pm.authMethods, req.pathName, pathConf, true, req.credentials)
if err != nil {
req.res <- pathAddPublisherRes{err: err}
return
}
}
// create path if it doesn't exist
if _, ok := pm.paths[req.pathName]; !ok {
pm.createPath(pathConfName, pathConf, req.pathName, pathMatches)
}
req.res <- pathAddPublisherRes{path: pm.paths[req.pathName]}
}
func (pm *pathManager) doAPIPathsList(req pathAPIPathsListReq) {
paths := make(map[string]*path)
for name, pa := range pm.paths {
paths[name] = pa
}
req.res <- pathAPIPathsListRes{paths: paths}
}
func (pm *pathManager) doAPIPathsGet(req pathAPIPathsGetReq) {
path, ok := pm.paths[req.name]
if !ok {
req.res <- pathAPIPathsGetRes{err: errAPINotFound}
return
}
req.res <- pathAPIPathsGetRes{path: path}
}
func (pm *pathManager) createPath(
pathConfName string,
pathConf *conf.PathConf,

View file

@ -17,6 +17,7 @@ import (
"github.com/bluenviron/mediacommon/pkg/codecs/mpeg4audio"
"github.com/google/uuid"
"github.com/bluenviron/mediamtx/internal/asyncwriter"
"github.com/bluenviron/mediamtx/internal/conf"
"github.com/bluenviron/mediamtx/internal/externalcmd"
"github.com/bluenviron/mediamtx/internal/logger"
@ -240,7 +241,7 @@ func (c *rtmpConn) runRead(conn *rtmp.Conn, u *url.URL) error {
c.pathName = pathName
c.mutex.Unlock()
writer := newAsyncWriter(c.writeQueueSize, c)
writer := asyncwriter.New(c.writeQueueSize, c)
var medias []*description.Media
var w *rtmp.Writer
@ -266,7 +267,7 @@ func (c *rtmpConn) runRead(conn *rtmp.Conn, u *url.URL) error {
"the stream doesn't contain any supported codec, which are currently H264, MPEG-4 Audio, MPEG-1/2 Audio")
}
defer res.stream.RemoveReader(c)
defer res.stream.RemoveReader(writer)
c.Log(logger.Info, "is reading from path '%s', %s",
res.path.name, sourceMediaInfo(medias))
@ -298,14 +299,14 @@ func (c *rtmpConn) runRead(conn *rtmp.Conn, u *url.URL) error {
// disable read deadline
c.nconn.SetReadDeadline(time.Time{})
writer.start()
writer.Start()
select {
case <-c.ctx.Done():
writer.stop()
writer.Stop()
return fmt.Errorf("terminated")
case err := <-writer.error():
case err := <-writer.Error():
return err
}
}
@ -313,7 +314,7 @@ func (c *rtmpConn) runRead(conn *rtmp.Conn, u *url.URL) error {
func (c *rtmpConn) setupVideo(
w **rtmp.Writer,
stream *stream.Stream,
writer *asyncWriter,
writer *asyncwriter.Writer,
) (*description.Media, format.Format) {
var videoFormatH264 *format.H264
videoMedia := stream.Desc().FindFormat(&videoFormatH264)
@ -321,60 +322,56 @@ func (c *rtmpConn) setupVideo(
if videoFormatH264 != nil {
var videoDTSExtractor *h264.DTSExtractor
stream.AddReader(c, videoMedia, videoFormatH264, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.H264)
stream.AddReader(writer, videoMedia, videoFormatH264, func(u unit.Unit) error {
tunit := u.(*unit.H264)
if tunit.AU == nil {
if tunit.AU == nil {
return nil
}
idrPresent := false
nonIDRPresent := false
for _, nalu := range tunit.AU {
typ := h264.NALUType(nalu[0] & 0x1F)
switch typ {
case h264.NALUTypeIDR:
idrPresent = true
case h264.NALUTypeNonIDR:
nonIDRPresent = true
}
}
var dts time.Duration
// wait until we receive an IDR
if videoDTSExtractor == nil {
if !idrPresent {
return nil
}
pts := tunit.PTS
videoDTSExtractor = h264.NewDTSExtractor()
idrPresent := false
nonIDRPresent := false
for _, nalu := range tunit.AU {
typ := h264.NALUType(nalu[0] & 0x1F)
switch typ {
case h264.NALUTypeIDR:
idrPresent = true
case h264.NALUTypeNonIDR:
nonIDRPresent = true
}
var err error
dts, err = videoDTSExtractor.Extract(tunit.AU, tunit.PTS)
if err != nil {
return err
}
} else {
if !idrPresent && !nonIDRPresent {
return nil
}
var dts time.Duration
// wait until we receive an IDR
if videoDTSExtractor == nil {
if !idrPresent {
return nil
}
videoDTSExtractor = h264.NewDTSExtractor()
var err error
dts, err = videoDTSExtractor.Extract(tunit.AU, pts)
if err != nil {
return err
}
} else {
if !idrPresent && !nonIDRPresent {
return nil
}
var err error
dts, err = videoDTSExtractor.Extract(tunit.AU, pts)
if err != nil {
return err
}
var err error
dts, err = videoDTSExtractor.Extract(tunit.AU, tunit.PTS)
if err != nil {
return err
}
}
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
return (*w).WriteH264(pts, dts, idrPresent, tunit.AU)
})
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
return (*w).WriteH264(tunit.PTS, dts, idrPresent, tunit.AU)
})
return videoMedia, videoFormatH264
@ -386,36 +383,32 @@ func (c *rtmpConn) setupVideo(
func (c *rtmpConn) setupAudio(
w **rtmp.Writer,
stream *stream.Stream,
writer *asyncWriter,
writer *asyncwriter.Writer,
) (*description.Media, format.Format) {
var audioFormatMPEG4Generic *format.MPEG4AudioGeneric
audioMedia := stream.Desc().FindFormat(&audioFormatMPEG4Generic)
if audioMedia != nil {
stream.AddReader(c, audioMedia, audioFormatMPEG4Generic, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.MPEG4AudioGeneric)
if tunit.AUs == nil {
return nil
}
pts := tunit.PTS
for i, au := range tunit.AUs {
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err := (*w).WriteMPEG4Audio(
pts+time.Duration(i)*mpeg4audio.SamplesPerAccessUnit*
time.Second/time.Duration(audioFormatMPEG4Generic.ClockRate()),
au,
)
if err != nil {
return err
}
}
stream.AddReader(writer, audioMedia, audioFormatMPEG4Generic, func(u unit.Unit) error {
tunit := u.(*unit.MPEG4AudioGeneric)
if tunit.AUs == nil {
return nil
})
}
for i, au := range tunit.AUs {
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err := (*w).WriteMPEG4Audio(
tunit.PTS+time.Duration(i)*mpeg4audio.SamplesPerAccessUnit*
time.Second/time.Duration(audioFormatMPEG4Generic.ClockRate()),
au,
)
if err != nil {
return err
}
}
return nil
})
return audioMedia, audioFormatMPEG4Generic
@ -428,19 +421,15 @@ func (c *rtmpConn) setupAudio(
audioFormatMPEG4AudioLATM.Config != nil &&
len(audioFormatMPEG4AudioLATM.Config.Programs) == 1 &&
len(audioFormatMPEG4AudioLATM.Config.Programs[0].Layers) == 1 {
stream.AddReader(c, audioMedia, audioFormatMPEG4AudioLATM, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.MPEG4AudioLATM)
stream.AddReader(writer, audioMedia, audioFormatMPEG4AudioLATM, func(u unit.Unit) error {
tunit := u.(*unit.MPEG4AudioLATM)
if tunit.AU == nil {
return nil
}
if tunit.AU == nil {
return nil
}
pts := tunit.PTS
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
return (*w).WriteMPEG4Audio(pts, tunit.AU)
})
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
return (*w).WriteMPEG4Audio(tunit.PTS, tunit.AU)
})
return audioMedia, audioFormatMPEG4AudioLATM
@ -450,35 +439,33 @@ func (c *rtmpConn) setupAudio(
audioMedia = stream.Desc().FindFormat(&audioFormatMPEG1)
if audioMedia != nil {
stream.AddReader(c, audioMedia, audioFormatMPEG1, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.MPEG1Audio)
stream.AddReader(writer, audioMedia, audioFormatMPEG1, func(u unit.Unit) error {
tunit := u.(*unit.MPEG1Audio)
pts := tunit.PTS
pts := tunit.PTS
for _, frame := range tunit.Frames {
var h mpeg1audio.FrameHeader
err := h.Unmarshal(frame)
if err != nil {
return err
}
if !(!h.MPEG2 && h.Layer == 3) {
return fmt.Errorf("RTMP only supports MPEG-1 layer 3 audio")
}
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = (*w).WriteMPEG1Audio(pts, &h, frame)
if err != nil {
return err
}
pts += time.Duration(h.SampleCount()) *
time.Second / time.Duration(h.SampleRate)
for _, frame := range tunit.Frames {
var h mpeg1audio.FrameHeader
err := h.Unmarshal(frame)
if err != nil {
return err
}
return nil
})
if !(!h.MPEG2 && h.Layer == 3) {
return fmt.Errorf("RTMP only supports MPEG-1 layer 3 audio")
}
c.nconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = (*w).WriteMPEG1Audio(pts, &h, frame)
if err != nil {
return err
}
pts += time.Duration(h.SampleCount()) *
time.Second / time.Duration(h.SampleRate)
}
return nil
})
return audioMedia, audioFormatMPEG1

View file

@ -74,8 +74,8 @@ func newRTSPSession(
created: time.Now(),
}
s.decodeErrLogger = newLimitedLogger(s)
s.writeErrLogger = newLimitedLogger(s)
s.decodeErrLogger = logger.NewLimitedLogger(s)
s.writeErrLogger = logger.NewLimitedLogger(s)
s.Log(logger.Info, "created by %v", s.author.NetConn().RemoteAddr())

View file

@ -94,7 +94,7 @@ func (s *rtspSource) Log(level logger.Level, format string, args ...interface{})
func (s *rtspSource) run(ctx context.Context, cnf *conf.PathConf, reloadConf chan *conf.PathConf) error {
s.Log(logger.Debug, "connecting")
decodeErrLogger := newLimitedLogger(s)
decodeErrLogger := logger.NewLimitedLogger(s)
c := &gortsplib.Client{
Transport: cnf.SourceProtocol.Transport,

View file

@ -167,7 +167,7 @@ func (s *sourceStatic) run() {
select {
case err := <-implErr:
innerCtxCancel()
s.impl.Log(logger.Info, "ERR: %v", err)
s.impl.Log(logger.Error, err.Error())
recreating = true
recreateTimer = time.NewTimer(sourceStaticRetryPause)

View file

@ -18,6 +18,7 @@ import (
"github.com/datarhei/gosrt"
"github.com/google/uuid"
"github.com/bluenviron/mediamtx/internal/asyncwriter"
"github.com/bluenviron/mediamtx/internal/conf"
"github.com/bluenviron/mediamtx/internal/externalcmd"
"github.com/bluenviron/mediamtx/internal/logger"
@ -234,7 +235,7 @@ func (c *srtConn) runPublishReader(sconn srt.Conn, path *path) error {
return err
}
decodeErrLogger := newLimitedLogger(c)
decodeErrLogger := logger.NewLimitedLogger(c)
r.OnDecodeError(func(err error) {
decodeErrLogger.Log(logger.Warn, err.Error())
@ -421,7 +422,7 @@ func (c *srtConn) runRead(req srtNewConnReq, pathName string, user string, pass
c.conn = sconn
c.mutex.Unlock()
writer := newAsyncWriter(c.writeQueueSize, c)
writer := asyncwriter.New(c.writeQueueSize, c)
var w *mpegts.Writer
var tracks []*mpegts.Track
@ -443,75 +444,67 @@ func (c *srtConn) runRead(req srtNewConnReq, pathName string, user string, pass
case *format.H265: //nolint:dupl
track := addTrack(medi, &mpegts.CodecH265{})
randomAccessReceived := false
dtsExtractor := h265.NewDTSExtractor()
var dtsExtractor *h265.DTSExtractor
res.stream.AddReader(c, medi, forma, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.H265)
if tunit.AU == nil {
res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
tunit := u.(*unit.H265)
if tunit.AU == nil {
return nil
}
randomAccess := h265.IsRandomAccess(tunit.AU)
if dtsExtractor == nil {
if !randomAccess {
return nil
}
dtsExtractor = h265.NewDTSExtractor()
}
randomAccess := h265.IsRandomAccess(tunit.AU)
dts, err := dtsExtractor.Extract(tunit.AU, tunit.PTS)
if err != nil {
return err
}
if !randomAccessReceived {
if !randomAccess {
return nil
}
randomAccessReceived = true
}
pts := tunit.PTS
dts, err := dtsExtractor.Extract(tunit.AU, pts)
if err != nil {
return err
}
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteH26x(track, durationGoToMPEGTS(pts), durationGoToMPEGTS(dts), randomAccess, tunit.AU)
if err != nil {
return err
}
return bw.Flush()
})
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteH26x(track, durationGoToMPEGTS(tunit.PTS), durationGoToMPEGTS(dts), randomAccess, tunit.AU)
if err != nil {
return err
}
return bw.Flush()
})
case *format.H264: //nolint:dupl
track := addTrack(medi, &mpegts.CodecH264{})
firstIDRReceived := false
dtsExtractor := h264.NewDTSExtractor()
var dtsExtractor *h264.DTSExtractor
res.stream.AddReader(c, medi, forma, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.H264)
if tunit.AU == nil {
res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
tunit := u.(*unit.H264)
if tunit.AU == nil {
return nil
}
idrPresent := h264.IDRPresent(tunit.AU)
if dtsExtractor == nil {
if !idrPresent {
return nil
}
dtsExtractor = h264.NewDTSExtractor()
}
idrPresent := h264.IDRPresent(tunit.AU)
dts, err := dtsExtractor.Extract(tunit.AU, tunit.PTS)
if err != nil {
return err
}
if !firstIDRReceived {
if !idrPresent {
return nil
}
firstIDRReceived = true
}
pts := tunit.PTS
dts, err := dtsExtractor.Extract(tunit.AU, pts)
if err != nil {
return err
}
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteH26x(track, durationGoToMPEGTS(pts), durationGoToMPEGTS(dts), idrPresent, tunit.AU)
if err != nil {
return err
}
return bw.Flush()
})
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteH26x(track, durationGoToMPEGTS(tunit.PTS), durationGoToMPEGTS(dts), idrPresent, tunit.AU)
if err != nil {
return err
}
return bw.Flush()
})
case *format.MPEG4AudioGeneric:
@ -519,22 +512,18 @@ func (c *srtConn) runRead(req srtNewConnReq, pathName string, user string, pass
Config: *forma.Config,
})
res.stream.AddReader(c, medi, forma, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.MPEG4AudioGeneric)
if tunit.AUs == nil {
return nil
}
res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
tunit := u.(*unit.MPEG4AudioGeneric)
if tunit.AUs == nil {
return nil
}
pts := tunit.PTS
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteMPEG4Audio(track, durationGoToMPEGTS(pts), tunit.AUs)
if err != nil {
return err
}
return bw.Flush()
})
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteMPEG4Audio(track, durationGoToMPEGTS(tunit.PTS), tunit.AUs)
if err != nil {
return err
}
return bw.Flush()
})
case *format.MPEG4AudioLATM:
@ -545,22 +534,18 @@ func (c *srtConn) runRead(req srtNewConnReq, pathName string, user string, pass
Config: *forma.Config.Programs[0].Layers[0].AudioSpecificConfig,
})
res.stream.AddReader(c, medi, forma, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.MPEG4AudioLATM)
if tunit.AU == nil {
return nil
}
res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
tunit := u.(*unit.MPEG4AudioLATM)
if tunit.AU == nil {
return nil
}
pts := tunit.PTS
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteMPEG4Audio(track, durationGoToMPEGTS(pts), [][]byte{tunit.AU})
if err != nil {
return err
}
return bw.Flush()
})
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteMPEG4Audio(track, durationGoToMPEGTS(tunit.PTS), [][]byte{tunit.AU})
if err != nil {
return err
}
return bw.Flush()
})
}
@ -574,43 +559,35 @@ func (c *srtConn) runRead(req srtNewConnReq, pathName string, user string, pass
}(),
})
res.stream.AddReader(c, medi, forma, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.Opus)
if tunit.Packets == nil {
return nil
}
res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
tunit := u.(*unit.Opus)
if tunit.Packets == nil {
return nil
}
pts := tunit.PTS
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteOpus(track, durationGoToMPEGTS(pts), tunit.Packets)
if err != nil {
return err
}
return bw.Flush()
})
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteOpus(track, durationGoToMPEGTS(tunit.PTS), tunit.Packets)
if err != nil {
return err
}
return bw.Flush()
})
case *format.MPEG1Audio:
track := addTrack(medi, &mpegts.CodecMPEG1Audio{})
res.stream.AddReader(c, medi, forma, func(u unit.Unit) {
writer.push(func() error {
tunit := u.(*unit.MPEG1Audio)
if tunit.Frames == nil {
return nil
}
res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
tunit := u.(*unit.MPEG1Audio)
if tunit.Frames == nil {
return nil
}
pts := tunit.PTS
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteMPEG1Audio(track, durationGoToMPEGTS(pts), tunit.Frames)
if err != nil {
return err
}
return bw.Flush()
})
sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
err = w.WriteMPEG1Audio(track, durationGoToMPEGTS(tunit.PTS), tunit.Frames)
if err != nil {
return err
}
return bw.Flush()
})
}
}
@ -647,14 +624,14 @@ func (c *srtConn) runRead(req srtNewConnReq, pathName string, user string, pass
// disable read deadline
sconn.SetReadDeadline(time.Time{})
writer.start()
writer.Start()
select {
case <-c.ctx.Done():
writer.stop()
writer.Stop()
return true, fmt.Errorf("terminated")
case err := <-writer.error():
case err := <-writer.Error():
return true, err
}
}
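A smaller behavioural change in the H264/H265 branches above (mirrored in rtmp_conn.go's setupVideo): the firstIDRReceived / randomAccessReceived flags are gone, and the DTS extractor is now allocated lazily when the first IDR (or random-access unit) arrives, which performs the same gating. The reshaped H264 callback, excerpted from the hunk above and not compilable on its own:

var dtsExtractor *h264.DTSExtractor

res.stream.AddReader(writer, medi, forma, func(u unit.Unit) error {
	tunit := u.(*unit.H264)
	if tunit.AU == nil {
		return nil
	}

	// drop access units until the first IDR, then start extracting DTS
	idrPresent := h264.IDRPresent(tunit.AU)
	if dtsExtractor == nil {
		if !idrPresent {
			return nil
		}
		dtsExtractor = h264.NewDTSExtractor()
	}

	dts, err := dtsExtractor.Extract(tunit.AU, tunit.PTS)
	if err != nil {
		return err
	}

	sconn.SetWriteDeadline(time.Now().Add(time.Duration(c.writeTimeout)))
	err = w.WriteH26x(track, durationGoToMPEGTS(tunit.PTS), durationGoToMPEGTS(dts), idrPresent, tunit.AU)
	if err != nil {
		return err
	}
	return bw.Flush()
})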

View file

@ -137,7 +137,7 @@ func newSRTServer(
// Log is the main logging function.
func (s *srtServer) Log(level logger.Level, format string, args ...interface{}) {
s.parent.Log(level, "[SRT] "+format, append([]interface{}{}, args...)...)
s.parent.Log(level, "[SRT] "+format, args...)
}
func (s *srtServer) close() {

View file

@ -91,7 +91,7 @@ func (s *srtSource) runReader(sconn srt.Conn) error {
return err
}
decodeErrLogger := newLimitedLogger(s)
decodeErrLogger := logger.NewLimitedLogger(s)
r.OnDecodeError(func(err error) {
decodeErrLogger.Log(logger.Warn, err.Error())

View file

@ -140,7 +140,7 @@ func (s *udpSource) runReader(pc net.PacketConn) error {
return err
}
decodeErrLogger := newLimitedLogger(s)
decodeErrLogger := logger.NewLimitedLogger(s)
r.OnDecodeError(func(err error) {
decodeErrLogger.Log(logger.Warn, err.Error())

View file

@ -416,7 +416,7 @@ func newWebRTCManager(
// Log is the main logging function.
func (m *webRTCManager) Log(level logger.Level, format string, args ...interface{}) {
m.parent.Log(level, "[WebRTC] "+format, append([]interface{}{}, args...)...)
m.parent.Log(level, "[WebRTC] "+format, args...)
}
func (m *webRTCManager) close() {

View file

@ -12,6 +12,7 @@ import (
"github.com/bluenviron/gortsplib/v4/pkg/format/rtpvp9"
"github.com/pion/webrtc/v3"
"github.com/bluenviron/mediamtx/internal/asyncwriter"
"github.com/bluenviron/mediamtx/internal/stream"
"github.com/bluenviron/mediamtx/internal/unit"
)
@ -348,9 +349,8 @@ func newWebRTCOutgoingTrackAudio(desc *description.Session) (*webRTCOutgoingTrac
}
func (t *webRTCOutgoingTrack) start(
r reader,
stream *stream.Stream,
writer *asyncWriter,
writer *asyncwriter.Writer,
) {
// read incoming RTCP packets to make interceptors work
go func() {
@ -363,9 +363,7 @@ func (t *webRTCOutgoingTrack) start(
}
}()
stream.AddReader(r, t.media, t.format, func(u unit.Unit) {
writer.push(func() error {
return t.cb(u)
})
stream.AddReader(writer, t.media, t.format, func(u unit.Unit) error {
return t.cb(u)
})
}

View file

@ -16,6 +16,7 @@ import (
"github.com/pion/sdp/v3"
"github.com/pion/webrtc/v3"
"github.com/bluenviron/mediamtx/internal/asyncwriter"
"github.com/bluenviron/mediamtx/internal/logger"
"github.com/bluenviron/mediamtx/internal/webrtcpc"
)
@ -511,29 +512,29 @@ func (s *webRTCSession) runRead() (int, error) {
s.pc = pc
s.mutex.Unlock()
writer := newAsyncWriter(s.writeQueueSize, s)
writer := asyncwriter.New(s.writeQueueSize, s)
for _, track := range tracks {
track.start(s, res.stream, writer)
track.start(res.stream, writer)
}
defer res.stream.RemoveReader(s)
defer res.stream.RemoveReader(writer)
s.Log(logger.Info, "is reading from path '%s', %s",
res.path.name, sourceMediaInfo(webrtcMediasOfOutgoingTracks(tracks)))
writer.start()
writer.Start()
select {
case <-pc.Disconnected():
writer.stop()
writer.Stop()
return 0, fmt.Errorf("peer connection closed")
case err := <-writer.error():
case err := <-writer.Error():
return 0, err
case <-s.ctx.Done():
writer.stop()
writer.Stop()
return 0, fmt.Errorf("terminated")
}
}