diff --git a/.golangci.yml b/.golangci.yml index 257373b..150c7e5 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,11 +1,14 @@ +version: "2" + run: - go: '1.19' - deadline: 210s + go: '1.24' timeout: 10m - skip-dirs: + +issues: + exclude-dirs: - mocks - '.*_mock' - skip-files: + exclude-files: - '.*_mock.go' - ".*\\.pb\\.go$" diff --git a/Makefile b/Makefile index d9b1d48..a4c5484 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ NAME = stream-cli -GOLANGCI_VERSION = 1.55.2 +GOLANGCI_VERSION = 2.8.0 GOLANGCI = .bin/golangci/$(GOLANGCI_VERSION)/golangci-lint $(GOLANGCI): @curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(dir $(GOLANGCI)) v$(GOLANGCI_VERSION) diff --git a/go.mod b/go.mod index 1677c79..8f2b506 100644 --- a/go.mod +++ b/go.mod @@ -1,19 +1,63 @@ module github.com/GetStream/stream-cli -go 1.22 +go 1.24 require ( github.com/AlecAivazis/survey/v2 v2.3.4 github.com/GetStream/stream-chat-go/v5 v5.8.1 github.com/MakeNowJust/heredoc v1.0.0 + github.com/aws/aws-sdk-go-v2/config v1.32.7 + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.21.0 + github.com/aws/aws-sdk-go-v2/service/s3 v1.95.1 github.com/cheynewallace/tabby v1.1.1 github.com/gizak/termui/v3 v3.1.0 github.com/gorilla/websocket v1.5.0 + github.com/pion/rtp v1.10.0 + github.com/pion/webrtc/v4 v4.2.3 github.com/spf13/cobra v1.4.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.11.0 ) +require ( + github.com/aws/aws-sdk-go-v2 v1.41.1 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.19.7 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 // indirect + 
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 // indirect + github.com/aws/aws-sdk-go-v2/service/signin v1.0.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.30.9 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 // indirect + github.com/aws/smithy-go v1.24.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/pion/datachannel v1.6.0 // indirect + github.com/pion/dtls/v3 v3.0.10 // indirect + github.com/pion/ice/v4 v4.2.0 // indirect + github.com/pion/interceptor v0.1.43 // indirect + github.com/pion/logging v0.2.4 // indirect + github.com/pion/mdns/v2 v2.1.0 // indirect + github.com/pion/randutil v0.1.0 // indirect + github.com/pion/rtcp v1.2.16 // indirect + github.com/pion/sctp v1.9.2 // indirect + github.com/pion/sdp/v3 v3.0.17 // indirect + github.com/pion/srtp/v3 v3.0.10 // indirect + github.com/pion/stun/v3 v3.1.1 // indirect + github.com/pion/transport/v4 v4.0.1 // indirect + github.com/pion/turn/v4 v4.1.4 // indirect + github.com/wlynxg/anet v0.0.5 // indirect + golang.org/x/crypto v0.33.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/time v0.10.0 // indirect +) + require ( github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect @@ -38,12 +82,12 @@ require ( github.com/spf13/afero v1.8.2 // indirect github.com/spf13/cast v1.5.0 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect - github.com/stretchr/testify v1.7.1 + github.com/stretchr/testify v1.11.1 github.com/subosito/gotenv v1.2.0 // indirect - golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect - golang.org/x/term v0.0.0-20220411215600-e5f449aeb171 // 
indirect - golang.org/x/text v0.3.8 // indirect + golang.org/x/sys v0.30.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect gopkg.in/ini.v1 v1.66.4 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index f11ddd2..7dbe48c 100644 --- a/go.sum +++ b/go.sum @@ -46,6 +46,46 @@ github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s= github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w= +github.com/aws/aws-sdk-go-v2 v1.41.1 h1:ABlyEARCDLN034NhxlRUSZr4l71mh+T5KAeGh6cerhU= +github.com/aws/aws-sdk-go-v2 v1.41.1/go.mod h1:MayyLB8y+buD9hZqkCW3kX1AKq07Y5pXxtgB+rRFhz0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 h1:489krEF9xIGkOaaX3CE/Be2uWjiXrkCH6gUX+bZA/BU= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4/go.mod h1:IOAPF6oT9KCsceNTvvYMNHy0+kMF8akOjeDvPENWxp4= +github.com/aws/aws-sdk-go-v2/config v1.32.7 h1:vxUyWGUwmkQ2g19n7JY/9YL8MfAIl7bTesIUykECXmY= +github.com/aws/aws-sdk-go-v2/config v1.32.7/go.mod h1:2/Qm5vKUU/r7Y+zUk/Ptt2MDAEKAfUtKc1+3U1Mo3oY= +github.com/aws/aws-sdk-go-v2/credentials v1.19.7 h1:tHK47VqqtJxOymRrNtUXN5SP/zUTvZKeLx4tH6PGQc8= +github.com/aws/aws-sdk-go-v2/credentials v1.19.7/go.mod h1:qOZk8sPDrxhf+4Wf4oT2urYJrYt3RejHSzgAquYeppw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17 h1:I0GyV8wiYrP8XpA70g1HBcQO1JlQxCMTW9npl5UbDHY= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.17/go.mod h1:tyw7BOl5bBe/oqvoIeECFJjMdzXoa/dfVz3QQ5lgHGA= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.21.0 h1:pQZGI0qQXeCHZHMeWzhwPu+4jkWrdrIb2dgpG4OKmco= +github.com/aws/aws-sdk-go-v2/feature/s3/manager 
v1.21.0/go.mod h1:XGq5kImVqQT4HUNbbG+0Y8O74URsPNH7CGPg1s1HW5E= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 h1:xOLELNKGp2vsiteLsvLPwxC+mYmO6OZ8PYgiuPJzF8U= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17/go.mod h1:5M5CI3D12dNOtH3/mk6minaRwI2/37ifCURZISxA/IQ= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 h1:WWLqlh79iO48yLkj1v3ISRNiv+3KdQoZ6JWyfcsyQik= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17/go.mod h1:EhG22vHRrvF8oXSTYStZhJc1aUgKtnJe+aOiFEV90cM= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 h1:JqcdRG//czea7Ppjb+g/n4o8i/R50aTBHkA7vu0lK+k= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17/go.mod h1:CO+WeGmIdj/MlPel2KwID9Gt7CNq4M65HUfBW97liM0= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 h1:0ryTNEdJbzUCEWkVXEXoqlXV72J5keC1GvILMOuD00E= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4/go.mod h1:HQ4qwNZh32C3CBeO6iJLQlgtMzqeG17ziAA/3KDJFow= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 h1:Z5EiPIzXKewUQK0QTMkutjiaPVeVYXX7KIqhXu/0fXs= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8/go.mod h1:FsTpJtvC4U1fyDXk7c71XoDv3HlRm8V3NiYLeYLh5YE= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 h1:RuNSMoozM8oXlgLG/n6WLaFGoea7/CddrCfIiSA+xdY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17/go.mod h1:F2xxQ9TZz5gDWsclCtPQscGpP0VUOc8RqgFM3vDENmU= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 h1:bGeHBsGZx0Dvu/eJC0Lh9adJa3M1xREcndxLNZlve2U= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17/go.mod h1:dcW24lbU0CzHusTE8LLHhRLI42ejmINN8Lcr22bwh/g= +github.com/aws/aws-sdk-go-v2/service/s3 v1.95.1 h1:C2dUPSnEpy4voWFIq3JNd8gN0Y5vYGDo44eUE58a/p8= 
+github.com/aws/aws-sdk-go-v2/service/s3 v1.95.1/go.mod h1:5jggDlZ2CLQhwJBiZJb4vfk4f0GxWdEDruWKEJ1xOdo= +github.com/aws/aws-sdk-go-v2/service/signin v1.0.5 h1:VrhDvQib/i0lxvr3zqlUwLwJP4fpmpyD9wYG1vfSu+Y= +github.com/aws/aws-sdk-go-v2/service/signin v1.0.5/go.mod h1:k029+U8SY30/3/ras4G/Fnv/b88N4mAfliNn08Dem4M= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.9 h1:v6EiMvhEYBoHABfbGB4alOYmCIrcgyPPiBE1wZAEbqk= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.9/go.mod h1:yifAsgBxgJWn3ggx70A3urX2AN49Y5sJTD1UQFlfqBw= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13 h1:gd84Omyu9JLriJVCbGApcLzVR3XtmC4ZDPcAI6Ftvds= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.13/go.mod h1:sTGThjphYE4Ohw8vJiRStAcu3rbjtXRsdNB0TvZ5wwo= +github.com/aws/aws-sdk-go-v2/service/sts v1.41.6 h1:5fFjR/ToSOzB2OQ/XqWpZBmNvmP/pJ1jOWYlFDJTjRQ= +github.com/aws/aws-sdk-go-v2/service/sts v1.41.6/go.mod h1:qgFDZQSD/Kys7nJnVqYlWKnh0SSdMjAi0uSwON4wgYQ= +github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= +github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cheynewallace/tabby v1.1.1 h1:JvUR8waht4Y0S3JF17G6Vhyt+FRhnqVCkk8l4YrOU54= github.com/cheynewallace/tabby v1.1.1/go.mod h1:Pba/6cUL8uYqvOc9RkyvFbHGrQ9wShyrn6/S/1OYVys= @@ -135,6 +175,8 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod 
h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= @@ -190,6 +232,40 @@ github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3v github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.0.1 h1:8e3L2cCQzLFi2CR4g7vGFuFxX7Jl1kKX8gW+iV0GUKU= github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pion/datachannel v1.6.0 h1:XecBlj+cvsxhAMZWFfFcPyUaDZtd7IJvrXqlXD/53i0= +github.com/pion/datachannel v1.6.0/go.mod h1:ur+wzYF8mWdC+Mkis5Thosk+u/VOL287apDNEbFpsIk= +github.com/pion/dtls/v3 v3.0.10 h1:k9ekkq1kaZoxnNEbyLKI8DI37j/Nbk1HWmMuywpQJgg= +github.com/pion/dtls/v3 v3.0.10/go.mod h1:YEmmBYIoBsY3jmG56dsziTv/Lca9y4Om83370CXfqJ8= +github.com/pion/ice/v4 v4.2.0 h1:jJC8S+CvXCCvIQUgx+oNZnoUpt6zwc34FhjWwCU4nlw= +github.com/pion/ice/v4 v4.2.0/go.mod h1:EgjBGxDgmd8xB0OkYEVFlzQuEI7kWSCFu+mULqaisy4= +github.com/pion/interceptor v0.1.43 h1:6hmRfnmjogSs300xfkR0JxYFZ9k5blTEvCD7wxEDuNQ= +github.com/pion/interceptor v0.1.43/go.mod h1:BSiC1qKIJt1XVr3l3xQ2GEmCFStk9tx8fwtCZxxgR7M= +github.com/pion/logging v0.2.4 h1:tTew+7cmQ+Mc1pTBLKH2puKsOvhm32dROumOZ655zB8= +github.com/pion/logging v0.2.4/go.mod h1:DffhXTKYdNZU+KtJ5pyQDjvOAh/GsNSyv1lbkFbe3so= +github.com/pion/mdns/v2 v2.1.0 h1:3IJ9+Xio6tWYjhN6WwuY142P/1jA0D5ERaIqawg/fOY= +github.com/pion/mdns/v2 v2.1.0/go.mod h1:pcez23GdynwcfRU1977qKU0mDxSeucttSHbCSfFOd9A= +github.com/pion/randutil v0.1.0 h1:CFG1UdESneORglEsnimhUjf33Rwjubwj6xfiOXBa3mA= +github.com/pion/randutil v0.1.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8= +github.com/pion/rtcp v1.2.16 h1:fk1B1dNW4hsI78XUCljZJlC4kZOPk67mNRuQ0fcEkSo= +github.com/pion/rtcp v1.2.16/go.mod 
h1:/as7VKfYbs5NIb4h6muQ35kQF/J0ZVNz2Z3xKoCBYOo= +github.com/pion/rtp v1.10.0 h1:XN/xca4ho6ZEcijpdF2VGFbwuHUfiIMf3ew8eAAE43w= +github.com/pion/rtp v1.10.0/go.mod h1:rF5nS1GqbR7H/TCpKwylzeq6yDM+MM6k+On5EgeThEM= +github.com/pion/sctp v1.9.2 h1:HxsOzEV9pWoeggv7T5kewVkstFNcGvhMPx0GvUOUQXo= +github.com/pion/sctp v1.9.2/go.mod h1:OTOlsQ5EDQ6mQ0z4MUGXt2CgQmKyafBEXhUVqLRB6G8= +github.com/pion/sdp/v3 v3.0.17 h1:9SfLAW/fF1XC8yRqQ3iWGzxkySxup4k4V7yN8Fs8nuo= +github.com/pion/sdp/v3 v3.0.17/go.mod h1:9tyKzznud3qiweZcD86kS0ff1pGYB3VX+Bcsmkx6IXo= +github.com/pion/srtp/v3 v3.0.10 h1:tFirkpBb3XccP5VEXLi50GqXhv5SKPxqrdlhDCJlZrQ= +github.com/pion/srtp/v3 v3.0.10/go.mod h1:3mOTIB0cq9qlbn59V4ozvv9ClW/BSEbRp4cY0VtaR7M= +github.com/pion/stun/v3 v3.1.1 h1:CkQxveJ4xGQjulGSROXbXq94TAWu8gIX2dT+ePhUkqw= +github.com/pion/stun/v3 v3.1.1/go.mod h1:qC1DfmcCTQjl9PBaMa5wSn3x9IPmKxSdcCsxBcDBndM= +github.com/pion/transport/v3 v3.1.1 h1:Tr684+fnnKlhPceU+ICdrw6KKkTms+5qHMgw6bIkYOM= +github.com/pion/transport/v3 v3.1.1/go.mod h1:+c2eewC5WJQHiAA46fkMMzoYZSuGzA/7E2FPrOYHctQ= +github.com/pion/transport/v4 v4.0.1 h1:sdROELU6BZ63Ab7FrOLn13M6YdJLY20wldXW2Cu2k8o= +github.com/pion/transport/v4 v4.0.1/go.mod h1:nEuEA4AD5lPdcIegQDpVLgNoDGreqM/YqmEx3ovP4jM= +github.com/pion/turn/v4 v4.1.4 h1:EU11yMXKIsK43FhcUnjLlrhE4nboHZq+TXBIi3QpcxQ= +github.com/pion/turn/v4 v4.1.4/go.mod h1:ES1DXVFKnOhuDkqn9hn5VJlSWmZPaRJLyBXoOeO/BmQ= +github.com/pion/webrtc/v4 v4.2.3 h1:RtdWDnkenNQGxUrZqWa5gSkTm5ncsLg5d+zu0M4cXt4= +github.com/pion/webrtc/v4 v4.2.3/go.mod h1:7vsyFzRzaKP5IELUnj8zLcglPyIT6wWwqTppBZ1k6Kc= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -220,10 +296,13 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P github.com/stretchr/testify v1.5.1/go.mod 
h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/wlynxg/anet v0.0.5 h1:J3VJGi1gvo0JwZ/P1/Yc/8p63SoW98B5dHkYDmpgvvU= +github.com/wlynxg/anet v0.0.5/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -241,6 +320,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus= +golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= 
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -305,6 +386,8 @@ golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -362,23 +445,25 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= +golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210503060354-a79de5458b56/go.mod 
h1:tfny5GFUkzUvx4ps4ajbZsCe5lw1metzhBm9T3x7oIY= -golang.org/x/term v0.0.0-20220411215600-e5f449aeb171 h1:EH1Deb8WZJ0xc0WK//leUHXcX9aLE5SymusoTmMZye8= -golang.org/x/term v0.0.0-20220411215600-e5f449aeb171/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY= -golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.10.0 h1:3usCWA8tQn0L8+hFJQNgzpWbd89begxN66o1Ojdn5L4= +golang.org/x/time v0.10.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools 
v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -521,8 +606,9 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= @@ -530,8 +616,8 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA= -gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod 
h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/pkg/cmd/chat/channel/channel.go b/pkg/cmd/chat/channel/channel.go index 3887ad0..8437d0f 100644 --- a/pkg/cmd/chat/channel/channel.go +++ b/pkg/cmd/chat/channel/channel.go @@ -551,7 +551,7 @@ func hideCmd() *cobra.Command { return err } - cmd.Printf("Successfully hid channel for " + userID + "\n") + cmd.Printf("Successfully hid channel for %s\n", userID) return nil }, } diff --git a/pkg/cmd/chat/imports/imports.go b/pkg/cmd/chat/imports/imports.go index eb8a805..40b2eea 100644 --- a/pkg/cmd/chat/imports/imports.go +++ b/pkg/cmd/chat/imports/imports.go @@ -30,7 +30,9 @@ func validateFile(cmd *cobra.Command, c *stream.Client, filename string) (*valid if err != nil { return nil, err } - defer reader.Close() + defer func() { + _ = reader.Close() + }() rolesResp, err := c.Permissions().ListRoles(cmd.Context()) if err != nil { @@ -85,7 +87,9 @@ func uploadToS3(ctx context.Context, filename, url string) error { if err != nil { return err } - defer data.Close() + defer func() { + _ = data.Close() + }() stat, err := data.Stat() if err != nil { @@ -103,7 +107,9 @@ func uploadToS3(ctx context.Context, filename, url string) error { if err != nil { return err } - defer resp.Body.Close() + defer func() { + _ = resp.Body.Close() + }() return nil } diff --git a/pkg/cmd/chat/imports/validator/validator_test.go b/pkg/cmd/chat/imports/validator/validator_test.go index 6136b72..11f319a 100644 --- a/pkg/cmd/chat/imports/validator/validator_test.go +++ b/pkg/cmd/chat/imports/validator/validator_test.go @@ -113,7 +113,9 @@ func TestValidator_Validate(t *testing.T) { t.Run(tt.name, func(t *testing.T) { f, err := os.Open("testdata/" + tt.filename) require.NoError(t, err) - defer f.Close() + defer func() { + _ = f.Close() + }() var options []Options if tt.lighterChanIDValidation { diff --git 
a/pkg/cmd/chat/utils/fileupload.go b/pkg/cmd/chat/utils/fileupload.go index 5246df7..66dd753 100644 --- a/pkg/cmd/chat/utils/fileupload.go +++ b/pkg/cmd/chat/utils/fileupload.go @@ -28,7 +28,9 @@ func uploadFile(c *stream.Client, cmd *cobra.Command, uploadtype uploadType, chT if err != nil { return "", err } - defer file.Close() + defer func() { + _ = file.Close() + }() req := stream.SendFileRequest{ User: &stream.User{ID: userID}, diff --git a/pkg/cmd/config/config.go b/pkg/cmd/config/config.go index 2d37d64..cb2bcc9 100644 --- a/pkg/cmd/config/config.go +++ b/pkg/cmd/config/config.go @@ -6,7 +6,7 @@ import ( "net/url" "text/tabwriter" - "github.com/AlecAivazis/survey/v2" + survey "github.com/AlecAivazis/survey/v2" "github.com/MakeNowJust/heredoc" "github.com/cheynewallace/tabby" "github.com/spf13/cobra" diff --git a/pkg/cmd/raw-recording/README.md b/pkg/cmd/raw-recording/README.md new file mode 100644 index 0000000..3b9cad4 --- /dev/null +++ b/pkg/cmd/raw-recording/README.md @@ -0,0 +1,297 @@ +# Raw Recording CLI + +Post-processing tools for Stream Video raw call recordings. Extract, process, and combine audio/video tracks from raw recording archives. + +## Features + +- **Discovery**: Use `list-tracks` to explore recording contents with screenshare detection +- **Smart Completion**: Shell completion with dynamic values from actual recordings +- **Validation**: Automatic validation of user inputs against available data +- **Multiple Formats**: Support for different output formats (table, JSON, completion) +- **Advanced Processing**: Extract, mux, mix and process audio/video with gap filling +- **S3 Support**: Download recordings directly from S3 or presigned URLs with caching + +## Commands + +### `list-tracks` - Discovery & Exploration + +The `list-tracks` command shows all tracks in a recording with their metadata. 
+ +```bash +# List all tracks in table format +stream-cli video raw-recording list-tracks --input-file recording.tar.gz + +# Get JSON output for programmatic use +stream-cli video raw-recording list-tracks --input-file recording.tar.gz --format json + +# Get user IDs only +stream-cli video raw-recording list-tracks --input-file recording.tar.gz --format users + +# Get session IDs only +stream-cli video raw-recording list-tracks --input-file recording.tar.gz --format sessions + +# Get track IDs only +stream-cli video raw-recording list-tracks --input-file recording.tar.gz --format tracks + +# Filter by track type +stream-cli video raw-recording list-tracks --input-file recording.tar.gz --track-type audio +``` + +**Options:** +- `--format ` - Output format: `table` (default), `json`, `users`, `sessions`, `tracks`, `completion` +- `--track-type ` - Filter by track type: `audio`, `video` + +**Output Formats:** +- `table` - Human-readable table with screenshare detection (default) +- `json` - Full metadata in JSON format for scripting +- `users` - List of user IDs only (for shell scripts) +- `sessions` - List of session IDs only (for automation) +- `tracks` - List of track IDs only (for filtering) + +### `extract-audio` - Extract Audio Tracks + +Extract and convert audio tracks from raw recordings to playable MKV format. 
+ +```bash +# Extract audio for all users +stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out + +# Extract audio for specific user +stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --user-id user123 + +# Extract audio for specific session +stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --session-id session456 + +# Extract a specific track +stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --track-id track789 + +# Disable gap filling +stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --fill-gaps=false +``` + +**Options:** +- `--user-id ` - Filter by user ID (all tracks for that user) +- `--session-id ` - Filter by session ID (all tracks for that session) +- `--track-id ` - Filter by track ID (specific track only) +- `--fill-gaps` - Fill temporal gaps with silence when track was muted (default: true) +- `--fix-dtx` - Fix DTX (Discontinuous Transmission) shrink audio (default: true) + +**Note**: Filters are mutually exclusive - only one of `--user-id`, `--session-id`, or `--track-id` can be specified at a time. + +### `extract-video` - Extract Video Tracks + +Extract and convert video tracks from raw recordings to playable MKV format. 
+ +```bash +# Extract video for all users +stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out + +# Extract video for specific user +stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --user-id user123 + +# Extract video for specific session +stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --session-id session456 + +# Extract a specific track +stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --track-id track789 + +# Disable gap filling +stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --fill-gaps=false +``` + +**Options:** +- `--user-id ` - Filter by user ID (all tracks for that user) +- `--session-id ` - Filter by session ID (all tracks for that session) +- `--track-id ` - Filter by track ID (specific track only) +- `--fill-gaps` - Fill temporal gaps with black frames when track was muted (default: true) + +**Note**: Filters are mutually exclusive - only one of `--user-id`, `--session-id`, or `--track-id` can be specified at a time. + +### `mux-av` - Combine Audio and Video + +Combine audio and video tracks into synchronized files. 
+ +```bash +# Mux all tracks +stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out + +# Mux tracks for specific user +stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out --user-id user123 + +# Mux only user camera tracks (not screenshare) +stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out --media user + +# Mux only display/screenshare tracks +stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out --media display +``` + +**Options:** +- `--user-id ` - Filter by user ID +- `--session-id ` - Filter by session ID +- `--track-id ` - Filter by track ID +- `--media ` - Filter by media type: `user` (camera/microphone), `display` (screenshare), or `both` (default) + +**Note**: Filters are mutually exclusive. + +### `mix-audio` - Mix Multiple Audio Tracks + +Mix audio from multiple users/sessions into a single synchronized audio file. + +```bash +# Mix all audio tracks from all users +stream-cli video raw-recording mix-audio --input-file recording.tar.gz --output ./out + +# Mix with verbose logging +stream-cli video raw-recording mix-audio --input-file recording.tar.gz --output ./out --verbose +``` + +Creates `composite_{callType}_{callId}_audio_{timestamp}.mkv` with all tracks properly synchronized based on original timing. + +### `process-all` - Complete Workflow + +Execute audio extraction, video extraction, and muxing in a single command. 
+ +```bash +# Process all tracks +stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./out + +# Process tracks for specific user +stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./out --user-id user123 + +# Process tracks for specific session +stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./out --session-id session456 +``` + +**Options:** +- `--user-id ` - Filter by user ID +- `--session-id ` - Filter by session ID +- `--track-id ` - Filter by track ID + +**Output files:** +- `individual_{callType}_{callId}_{userId}_{sessionId}_audio_only_{timestamp}.mkv` - Audio-only files +- `individual_{callType}_{callId}_{userId}_{sessionId}_video_only_{timestamp}.mkv` - Video-only files +- `individual_{callType}_{callId}_{userId}_{sessionId}_audio_video_{timestamp}.mkv` - Combined audio+video files +- `composite_{callType}_{callId}_audio_{timestamp}.mkv` - Mixed audio from all participants + +## Global Options + +These options are available for all commands: + +- `--input-file ` - Path to raw recording tar.gz archive +- `--input-dir ` - Path to extracted raw recording directory +- `--input-s3 ` - S3 URL (`s3://bucket/path`) or presigned HTTPS URL +- `--output ` - Output directory (required for most commands) +- `--verbose` - Enable verbose logging +- `--cache-dir ` - Cache directory for S3 downloads + +**Input options**: Only one of `--input-file`, `--input-dir`, or `--input-s3` can be specified. + +## S3 Support + +Download recordings directly from S3: + +```bash +# Using S3 URL (requires AWS credentials) +stream-cli video raw-recording list-tracks --input-s3 s3://mybucket/recordings/call.tar.gz + +# Using presigned HTTPS URL +stream-cli video raw-recording list-tracks --input-s3 "https://mybucket.s3.amazonaws.com/recordings/call.tar.gz?..." 
+ +# S3 downloads are cached locally to avoid re-downloading +stream-cli video raw-recording process-all --input-s3 s3://mybucket/call.tar.gz --output ./out +``` + +## Workflow Examples + +### Extract Audio for Each Participant + +```bash +# 1. Discover participants +stream-cli video raw-recording list-tracks --input-file call.tar.gz --format users + +# 2. Extract each participant's audio +for user in $(stream-cli video raw-recording list-tracks --input-file call.tar.gz --format users); do + echo "Extracting audio for user: $user" + stream-cli video raw-recording extract-audio --input-file call.tar.gz --output ./extracted --user-id "$user" +done +``` + +### Conference Call Audio Mixing + +```bash +# Mix all participants into single audio file +stream-cli video raw-recording mix-audio --input-file conference.tar.gz --output ./mixed + +# Create session-by-session mixed audio +for session in $(stream-cli video raw-recording list-tracks --input-file conference.tar.gz --format sessions); do + stream-cli video raw-recording mix-audio --input-file conference.tar.gz --output "./mixed/$session" +done +``` + +### Complete Processing Pipeline + +```bash +# All-in-one processing +stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./complete + +# Results: +# - ./complete/individual_*_audio_only_*.mkv (individual audio tracks) +# - ./complete/individual_*_video_only_*.mkv (individual video tracks) +# - ./complete/individual_*_audio_video_*.mkv (combined A/V tracks) +# - ./complete/composite_*_audio_*.mkv (mixed audio) +``` + +## Dependencies + +### FFmpeg + +Required for media processing and conversion. 
Must be compiled with the following libraries: + +- `libopus` - Opus audio codec +- `libvpx` - VP8/VP9 video codecs +- `libx264` - H.264 video codec +- `libaom` - AV1 video codec (libaom-av1) +- `libmp3lame` - MP3 audio codec (optional, for MP3 output) + +**macOS:** +```bash +brew install ffmpeg +``` + +**Ubuntu/Debian:** +```bash +sudo apt install ffmpeg +``` + +### GStreamer + +Required for RTP dump to container conversion. Install GStreamer 1.0 with the following plugin packages: + +**macOS:** +```bash +brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly +``` + +**Ubuntu/Debian:** +```bash +sudo apt install gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly +``` + +**Required GStreamer plugins:** +- `gst-plugins-base` - Core elements (tcpserversrc, filesink) +- `gst-plugins-good` - RTP plugins (rtpjitterbuffer, rtpvp8depay, rtpvp9depay, rtpopusdepay) +- `gst-plugins-bad` - Additional codecs (rtpav1depay, av1parse, matroskamux) +- `gst-plugins-ugly` - H.264 support (rtph264depay, h264parse) + +### AWS Credentials (Optional) + +Required for S3 URL support (`s3://...`). Not needed for presigned HTTPS URLs. + +Configure via: +- Environment variables: `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` +- AWS credentials file: `~/.aws/credentials` +- IAM role (when running on AWS infrastructure) + +### Go + +Go 1.24+ required for building the CLI tool from source (matches the `go` directive in go.mod). 
diff --git a/pkg/cmd/raw-recording/constants.go b/pkg/cmd/raw-recording/constants.go new file mode 100644 index 0000000..3e22861 --- /dev/null +++ b/pkg/cmd/raw-recording/constants.go @@ -0,0 +1,85 @@ +package rawrecording + +// Flag names for global/persistent flags +const ( + FlagInputFile = "input-file" + FlagInputDir = "input-dir" + FlagInputS3 = "input-s3" + FlagOutput = "output" + FlagVerbose = "verbose" + FlagCacheDir = "cache-dir" +) + +// Flag names for filter flags (used across multiple commands) +const ( + FlagUserID = "user-id" + FlagSessionID = "session-id" + FlagTrackID = "track-id" +) + +// Flag names for processing options +const ( + FlagFillGaps = "fill-gaps" + FlagFixDtx = "fix-dtx" + FlagMedia = "media" +) + +// Flag names for list-tracks command +const ( + FlagFormat = "format" + FlagTrackType = "track-type" + FlagCompletionType = "completion-type" +) + +// Flag descriptions for global/persistent flags +const ( + DescInputFile = "Raw recording tar.gz archive path" + DescInputDir = "Raw recording extracted directory path" + DescInputS3 = "Raw recording S3 URL (s3://bucket/path or presigned HTTPS URL)" + DescOutput = "Output directory" + DescVerbose = "Enable verbose logging" + DescCacheDir = "Cache directory for S3 downloads" +) + +// Flag descriptions for filter flags +const ( + DescUserID = "Filter by user ID" + DescSessionID = "Filter by session ID" + DescTrackID = "Filter by track ID" +) + +// Flag descriptions for processing options +const ( + DescFillGapsAudio = "Fill with silence when track was muted" + DescFillGapsVideo = "Fill with black frame when track was muted" + DescFixDtx = "Restore original audio duration by filling DTX silence gaps (required for A/V sync)" + DescMedia = "Filter by media type: 'user', 'display', or 'both'" +) + +// Flag descriptions for list-tracks command +const ( + DescFormat = "Output format: table, json, users, sessions, tracks, completion" + DescTrackType = "Filter by track type: audio, video" + 
DescCompletionType = "For completion format: users, sessions, tracks" +) + +// Default values +const ( + DefaultFormat = "table" + DefaultCompletionType = "tracks" + DefaultMedia = "both" + DefaultCacheSubdir = "stream-cli/raw-recordings" +) + +// Media type values +const ( + MediaUser = "user" + MediaDisplay = "display" + MediaBoth = "both" +) + +// Track type values +const ( + TrackTypeAudio = "audio" + TrackTypeVideo = "video" +) diff --git a/pkg/cmd/raw-recording/extract_audio.go b/pkg/cmd/raw-recording/extract_audio.go new file mode 100644 index 0000000..93734a8 --- /dev/null +++ b/pkg/cmd/raw-recording/extract_audio.go @@ -0,0 +1,144 @@ +package rawrecording + +import ( + "fmt" + "os" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +func extractAudioCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "extract-audio", + Short: "Generate playable audio files from raw recording tracks", + Long: heredoc.Doc(` + Generate playable audio files from raw recording tracks. + + Output format: MKV container with Opus audio codec. + + Filters are mutually exclusive: you can only specify one of + --user-id, --session-id, or --track-id at a time. 
+ `), + Example: heredoc.Doc(` + # Extract audio for all users (no filters) + $ stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out + + # Extract audio for specific user (all their tracks) + $ stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --user-id user123 + + # Extract audio for specific session + $ stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --session-id session456 + + # Extract a specific track + $ stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --track-id track1 + `), + RunE: runExtractAudio, + } + + fl := cmd.Flags() + fl.String(FlagUserID, "", DescUserID) + fl.String(FlagSessionID, "", DescSessionID) + fl.String(FlagTrackID, "", DescTrackID) + fl.Bool(FlagFillGaps, true, DescFillGapsAudio) + fl.Bool(FlagFixDtx, true, DescFixDtx) + + // Register completions + _ = cmd.RegisterFlagCompletionFunc(FlagUserID, completeUserIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagSessionID, completeSessionIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagTrackID, completeTrackIDs) + + return cmd +} + +func runExtractAudio(cmd *cobra.Command, args []string) error { + globalArgs, err := getGlobalArgs(cmd) + if err != nil { + return err + } + + // Validate global args (output is required for extract-audio) + if err := validateGlobalArgs(globalArgs, true); err != nil { + return err + } + + userID, _ := cmd.Flags().GetString(FlagUserID) + sessionID, _ := cmd.Flags().GetString(FlagSessionID) + trackID, _ := cmd.Flags().GetString(FlagTrackID) + fillGaps, _ := cmd.Flags().GetBool(FlagFillGaps) + fixDtx, _ := cmd.Flags().GetBool(FlagFixDtx) + + // Validate input arguments against actual recording data + metadata, err := validateInputArgs(globalArgs, userID, sessionID, trackID) + if err != nil { + return fmt.Errorf("validation error: %w", err) + } + + logger := setupLogger(globalArgs.Verbose) + logger.Info("Starting 
extract-audio command") + + // Print banner + printExtractAudioBanner(cmd, globalArgs, userID, sessionID, trackID, fillGaps, fixDtx) + + // Prepare working directory + workDir, cleanup, err := prepareWorkDir(globalArgs, logger) + if err != nil { + return err + } + defer cleanup() + globalArgs.WorkDir = workDir + + // Create output directory if it doesn't exist + if err := os.MkdirAll(globalArgs.Output, 0755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Extract audio tracks + extractor := processing.NewTrackExtractor(logger) + if _, err := extractor.ExtractTracks(&processing.TrackExtractorConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + UserID: userID, + SessionID: sessionID, + TrackID: trackID, + TrackKind: TrackTypeAudio, + MediaType: "both", + FillGap: fillGaps, + FillDtx: fixDtx, + }, metadata); err != nil { + return fmt.Errorf("failed to extract audio: %w", err) + } + + logger.Info("Extract audio command completed") + return nil +} + +func printExtractAudioBanner(cmd *cobra.Command, globalArgs *GlobalArgs, userID, sessionID, trackID string, fillGaps, fixDtx bool) { + cmd.Println("Extract audio command with mutually exclusive filtering:") + if globalArgs.InputFile != "" { + cmd.Printf(" Input file: %s\n", globalArgs.InputFile) + } + if globalArgs.InputDir != "" { + cmd.Printf(" Input directory: %s\n", globalArgs.InputDir) + } + if globalArgs.InputS3 != "" { + cmd.Printf(" Input S3: %s\n", globalArgs.InputS3) + } + cmd.Printf(" Output directory: %s\n", globalArgs.Output) + cmd.Printf(" User ID filter: %s\n", userID) + cmd.Printf(" Session ID filter: %s\n", sessionID) + cmd.Printf(" Track ID filter: %s\n", trackID) + + if trackID != "" { + cmd.Printf(" -> Processing specific track '%s'\n", trackID) + } else if sessionID != "" { + cmd.Printf(" -> Processing all audio tracks for session '%s'\n", sessionID) + } else if userID != "" { + cmd.Printf(" -> Processing all audio tracks for user '%s'\n", 
userID) + } else { + cmd.Println(" -> Processing all audio tracks (no filters)") + } + cmd.Printf(" Fill gaps: %t\n", fillGaps) + cmd.Printf(" Fix DTX: %t\n", fixDtx) +} diff --git a/pkg/cmd/raw-recording/extract_video.go b/pkg/cmd/raw-recording/extract_video.go new file mode 100644 index 0000000..bc10b2f --- /dev/null +++ b/pkg/cmd/raw-recording/extract_video.go @@ -0,0 +1,141 @@ +package rawrecording + +import ( + "fmt" + "os" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +func extractVideoCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "extract-video", + Short: "Generate playable video files from raw recording tracks", + Long: heredoc.Doc(` + Generate playable video files from raw recording tracks. + + Output format: MKV container with original video codec (VP8, VP9, H264, or AV1). + + Filters are mutually exclusive: you can only specify one of + --user-id, --session-id, or --track-id at a time. 
+ `), + Example: heredoc.Doc(` + # Extract video for all users (no filters) + $ stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out + + # Extract video for specific user (all their tracks) + $ stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --user-id user123 + + # Extract video for specific session + $ stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --session-id session456 + + # Extract a specific track + $ stream-cli video raw-recording extract-video --input-file recording.tar.gz --output ./out --track-id track1 + `), + RunE: runExtractVideo, + } + + fl := cmd.Flags() + fl.String(FlagUserID, "", DescUserID) + fl.String(FlagSessionID, "", DescSessionID) + fl.String(FlagTrackID, "", DescTrackID) + fl.Bool(FlagFillGaps, true, DescFillGapsVideo) + + // Register completions + _ = cmd.RegisterFlagCompletionFunc(FlagUserID, completeUserIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagSessionID, completeSessionIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagTrackID, completeTrackIDs) + + return cmd +} + +func runExtractVideo(cmd *cobra.Command, args []string) error { + globalArgs, err := getGlobalArgs(cmd) + if err != nil { + return err + } + + // Validate global args (output is required for extract-video) + if err := validateGlobalArgs(globalArgs, true); err != nil { + return err + } + + userID, _ := cmd.Flags().GetString(FlagUserID) + sessionID, _ := cmd.Flags().GetString(FlagSessionID) + trackID, _ := cmd.Flags().GetString(FlagTrackID) + fillGaps, _ := cmd.Flags().GetBool(FlagFillGaps) + + // Validate input arguments against actual recording data + metadata, err := validateInputArgs(globalArgs, userID, sessionID, trackID) + if err != nil { + return fmt.Errorf("validation error: %w", err) + } + + logger := setupLogger(globalArgs.Verbose) + logger.Info("Starting extract-video command") + + // Print banner + printExtractVideoBanner(cmd, globalArgs, userID, 
sessionID, trackID, fillGaps) + + // Prepare working directory + workDir, cleanup, err := prepareWorkDir(globalArgs, logger) + if err != nil { + return err + } + defer cleanup() + globalArgs.WorkDir = workDir + + // Create output directory if it doesn't exist + if err := os.MkdirAll(globalArgs.Output, 0755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Extract video tracks + extractor := processing.NewTrackExtractor(logger) + if _, err := extractor.ExtractTracks(&processing.TrackExtractorConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + UserID: userID, + SessionID: sessionID, + TrackID: trackID, + TrackKind: TrackTypeVideo, + MediaType: "both", + FillGap: fillGaps, + FillDtx: false, + }, metadata); err != nil { + return fmt.Errorf("failed to extract video tracks: %w", err) + } + + logger.Info("Extract video command completed successfully") + return nil +} + +func printExtractVideoBanner(cmd *cobra.Command, globalArgs *GlobalArgs, userID, sessionID, trackID string, fillGaps bool) { + cmd.Println("Extract video command with mutually exclusive filtering:") + if globalArgs.InputFile != "" { + cmd.Printf(" Input file: %s\n", globalArgs.InputFile) + } + if globalArgs.InputDir != "" { + cmd.Printf(" Input directory: %s\n", globalArgs.InputDir) + } + if globalArgs.InputS3 != "" { + cmd.Printf(" Input S3: %s\n", globalArgs.InputS3) + } + cmd.Printf(" Output directory: %s\n", globalArgs.Output) + cmd.Printf(" User ID filter: %s\n", userID) + cmd.Printf(" Session ID filter: %s\n", sessionID) + cmd.Printf(" Track ID filter: %s\n", trackID) + + if trackID != "" { + cmd.Printf(" -> Processing specific track '%s'\n", trackID) + } else if sessionID != "" { + cmd.Printf(" -> Processing all video tracks for session '%s'\n", sessionID) + } else if userID != "" { + cmd.Printf(" -> Processing all video tracks for user '%s'\n", userID) + } else { + cmd.Println(" -> Processing all video tracks (no filters)") + } + 
cmd.Printf(" Fill gaps: %t\n", fillGaps) +} diff --git a/pkg/cmd/raw-recording/list_tracks.go b/pkg/cmd/raw-recording/list_tracks.go new file mode 100644 index 0000000..586de4f --- /dev/null +++ b/pkg/cmd/raw-recording/list_tracks.go @@ -0,0 +1,205 @@ +package rawrecording + +import ( + "context" + "encoding/json" + "fmt" + "sort" + "strings" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +func listTracksCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list-tracks", + Short: "List all tracks in the raw recording with their metadata", + Long: heredoc.Doc(` + List all tracks in the raw recording with their metadata. + + This command displays information about all audio and video tracks + in the recording, including user IDs, session IDs, track IDs, and codecs. + + Note: --output is optional for this command (only displays information). + `), + Example: heredoc.Doc(` + # List all tracks in table format + $ stream-cli video raw-recording list-tracks --input-file recording.tar.gz + + # Get JSON output for programmatic use + $ stream-cli video raw-recording list-tracks --input-file recording.tar.gz --format json + + # Get user IDs only + $ stream-cli video raw-recording list-tracks --input-file recording.tar.gz --format users + + # Filter by track type + $ stream-cli video raw-recording list-tracks --input-file recording.tar.gz --track-type audio + `), + RunE: runListTracks, + } + + fl := cmd.Flags() + fl.String(FlagFormat, DefaultFormat, DescFormat) + fl.String(FlagTrackType, "", DescTrackType) + fl.String(FlagCompletionType, DefaultCompletionType, DescCompletionType) + + return cmd +} + +func runListTracks(cmd *cobra.Command, args []string) error { + globalArgs, err := getGlobalArgs(cmd) + if err != nil { + return err + } + + // Validate global args (output is optional for list-tracks) + if err := validateGlobalArgs(globalArgs, false); err != nil { + return err + } + + 
format, _ := cmd.Flags().GetString(FlagFormat) + trackType, _ := cmd.Flags().GetString(FlagTrackType) + completionType, _ := cmd.Flags().GetString(FlagCompletionType) + + logger := setupLogger(globalArgs.Verbose) + logger.Info("Starting list-tracks command") + + // Resolve input path (download from S3 if needed) + inputPath, err := resolveInputPath(context.Background(), globalArgs) + if err != nil { + return err + } + + parser := processing.NewMetadataParser(logger) + metadata, err := parser.ParseMetadataOnly(inputPath) + if err != nil { + return fmt.Errorf("failed to parse recording: %w", err) + } + + // Filter tracks if track type is specified + tracks := processing.FilterTracks(metadata.Tracks, "", "", "", trackType, "") + + // Output in requested format + switch format { + case "table": + printTracksTable(cmd, tracks) + case "json": + printTracksJSON(cmd, metadata) + case "completion": + printCompletion(cmd, metadata, completionType) + case "users": + printUsers(cmd, metadata.UserIDs) + case "sessions": + printSessions(cmd, metadata.Sessions) + case "tracks": + printTrackIDs(cmd, tracks) + default: + return fmt.Errorf("unknown format: %s", format) + } + + logger.Info("List tracks command completed") + return nil +} + +// printTracksTable prints tracks in a human-readable table format +func printTracksTable(cmd *cobra.Command, tracks []*processing.TrackInfo) { + if len(tracks) == 0 { + cmd.Println("No tracks found.") + return + } + + // Print header + cmd.Printf("%-22s %-38s %-38s %-6s %-12s %-15s %-8s\n", "USER ID", "SESSION ID", "TRACK ID", "TYPE", "SCREENSHARE", "CODEC", "SEGMENTS") + cmd.Printf("%-22s %-38s %-38s %-6s %-12s %-15s %-8s\n", + strings.Repeat("-", 22), + strings.Repeat("-", 38), + strings.Repeat("-", 38), + strings.Repeat("-", 6), + strings.Repeat("-", 12), + strings.Repeat("-", 15), + strings.Repeat("-", 8)) + + // Print tracks + for _, track := range tracks { + screenshareStatus := "No" + if track.IsScreenshare { + screenshareStatus = "Yes" + 
} + cmd.Printf("%-22s %-38s %-38s %-6s %-12s %-15s %-8d\n", + truncateString(track.UserID, 22), + truncateString(track.SessionID, 38), + truncateString(track.TrackID, 38), + track.TrackType, + screenshareStatus, + track.Codec, + track.SegmentCount) + } +} + +// truncateString truncates a string to a maximum length, adding "..." if needed +func truncateString(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen-3] + "..." +} + +// printTracksJSON prints the full metadata in JSON format +func printTracksJSON(cmd *cobra.Command, metadata *processing.RecordingMetadata) { + data, err := json.MarshalIndent(metadata, "", " ") + if err != nil { + cmd.PrintErrf("Error marshaling JSON: %v\n", err) + return + } + cmd.Println(string(data)) +} + +// printCompletion prints completion-friendly output +func printCompletion(cmd *cobra.Command, metadata *processing.RecordingMetadata, completionType string) { + switch completionType { + case "users": + printUsers(cmd, metadata.UserIDs) + case "sessions": + printSessions(cmd, metadata.Sessions) + case "tracks": + printTrackIDs(cmd, metadata.Tracks) + default: + cmd.PrintErrf("Unknown completion type: %s\n", completionType) + } +} + +// printUsers prints user IDs, one per line +func printUsers(cmd *cobra.Command, userIDs []string) { + sort.Strings(userIDs) + for _, userID := range userIDs { + cmd.Println(userID) + } +} + +// printSessions prints session IDs, one per line +func printSessions(cmd *cobra.Command, sessions []string) { + sort.Strings(sessions) + for _, session := range sessions { + cmd.Println(session) + } +} + +// printTrackIDs prints unique track IDs, one per line +func printTrackIDs(cmd *cobra.Command, tracks []*processing.TrackInfo) { + trackIDs := make([]string, 0) + seen := make(map[string]bool) + + for _, track := range tracks { + if !seen[track.TrackID] { + trackIDs = append(trackIDs, track.TrackID) + seen[track.TrackID] = true + } + } + + sort.Strings(trackIDs) + for _, trackID 
:= range trackIDs { + cmd.Println(trackID) + } +} diff --git a/pkg/cmd/raw-recording/mix_audio.go b/pkg/cmd/raw-recording/mix_audio.go new file mode 100644 index 0000000..0a3f601 --- /dev/null +++ b/pkg/cmd/raw-recording/mix_audio.go @@ -0,0 +1,83 @@ +package rawrecording + +import ( + "fmt" + "os" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +func mixAudioCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "mix-audio", + Short: "Mix multiple audio tracks into one file", + Long: heredoc.Doc(` + Mix all audio tracks from multiple users/sessions into a single audio file + with proper timing synchronization (like a conference call recording). + + Creates a composite audio file (MKV format by default) containing all mixed + tracks with proper timing synchronization based on the original recording timeline. + + Output: composite_{callType}_{callId}_audio_{timestamp}.mkv + `), + Example: heredoc.Doc(` + # Mix all audio tracks from all users and sessions + $ stream-cli video raw-recording mix-audio --input-file recording.tar.gz --output ./out + + # Mix with verbose logging + $ stream-cli video raw-recording mix-audio --input-file recording.tar.gz --output ./out --verbose + `), + RunE: runMixAudio, + } + + return cmd +} + +func runMixAudio(cmd *cobra.Command, args []string) error { + globalArgs, err := getGlobalArgs(cmd) + if err != nil { + return err + } + + // Validate global args (output is required for mix-audio) + if err := validateGlobalArgs(globalArgs, true); err != nil { + return err + } + + // Validate input arguments against actual recording data + metadata, err := validateInputArgs(globalArgs, "", "", "") + if err != nil { + return fmt.Errorf("validation error: %w", err) + } + + logger := setupLogger(globalArgs.Verbose) + logger.Info("Starting mix-audio command") + + // Prepare working directory + workDir, cleanup, err := prepareWorkDir(globalArgs, logger) + if err != 
nil { + return err + } + defer cleanup() + globalArgs.WorkDir = workDir + + // Create output directory if it doesn't exist + if err := os.MkdirAll(globalArgs.Output, 0755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Mix all audio tracks + mixer := processing.NewAudioMixer(logger) + _, _ = mixer.MixAllAudioTracks(&processing.AudioMixerConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + WithScreenshare: true, + WithExtract: true, + WithCleanup: false, + }, metadata) + + logger.Info("Mix-audio command completed successfully") + return nil +} diff --git a/pkg/cmd/raw-recording/mux_av.go b/pkg/cmd/raw-recording/mux_av.go new file mode 100644 index 0000000..9a87383 --- /dev/null +++ b/pkg/cmd/raw-recording/mux_av.go @@ -0,0 +1,137 @@ +package rawrecording + +import ( + "fmt" + "os" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +func muxAVCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "mux-av", + Short: "Mux audio and video tracks into a single file", + Long: heredoc.Doc(` + Mux audio and video tracks into a single file. + + This command combines audio and video tracks from the same + user/session into a single playable file. + + Filters are mutually exclusive: you can only specify one of + --user-id, --session-id, or --track-id at a time. 
+ + Media filtering: + --media user Only mux user camera audio/video pairs + --media display Only mux display sharing audio/video pairs + --media both Mux both types (default) + `), + Example: heredoc.Doc(` + # Mux all tracks + $ stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out + + # Mux tracks for specific user + $ stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out --user-id user123 + + # Mux only user camera tracks + $ stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out --media user + + # Mux only display sharing tracks + $ stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out --media display + `), + RunE: runMuxAV, + } + + fl := cmd.Flags() + fl.String(FlagUserID, "", DescUserID) + fl.String(FlagSessionID, "", DescSessionID) + fl.String(FlagTrackID, "", DescTrackID) + fl.String(FlagMedia, DefaultMedia, DescMedia) + + // Register completions + _ = cmd.RegisterFlagCompletionFunc(FlagUserID, completeUserIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagSessionID, completeSessionIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagTrackID, completeTrackIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagMedia, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{MediaUser, MediaDisplay, MediaBoth}, cobra.ShellCompDirectiveNoFileComp + }) + + return cmd +} + +func runMuxAV(cmd *cobra.Command, args []string) error { + globalArgs, err := getGlobalArgs(cmd) + if err != nil { + return err + } + + // Validate global args (output is required for mux-av) + if err := validateGlobalArgs(globalArgs, true); err != nil { + return err + } + + userID, _ := cmd.Flags().GetString(FlagUserID) + sessionID, _ := cmd.Flags().GetString(FlagSessionID) + trackID, _ := cmd.Flags().GetString(FlagTrackID) + media, _ := cmd.Flags().GetString(FlagMedia) + + // Validate input arguments against actual recording data + metadata, 
err := validateInputArgs(globalArgs, userID, sessionID, trackID) + if err != nil { + return fmt.Errorf("validation error: %w", err) + } + + logger := setupLogger(globalArgs.Verbose) + logger.Info("Starting mux-av command") + + // Print banner + cmd.Println("Mux audio and video command with hierarchical filtering:") + cmd.Printf(" Input file: %s\n", globalArgs.InputFile) + cmd.Printf(" Output directory: %s\n", globalArgs.Output) + cmd.Printf(" User ID filter: %s\n", userID) + cmd.Printf(" Session ID filter: %s\n", sessionID) + cmd.Printf(" Track ID filter: %s\n", trackID) + cmd.Printf(" Media filter: %s\n", media) + + if trackID != "" { + cmd.Printf(" -> Processing specific track '%s'\n", trackID) + } else if sessionID != "" { + cmd.Printf(" -> Processing all tracks for session '%s'\n", sessionID) + } else if userID != "" { + cmd.Printf(" -> Processing all tracks for user '%s'\n", userID) + } else { + cmd.Println(" -> Processing all tracks (no filters)") + } + + // Prepare working directory + workDir, cleanup, err := prepareWorkDir(globalArgs, logger) + if err != nil { + return err + } + defer cleanup() + globalArgs.WorkDir = workDir + + // Create output directory if it doesn't exist + if err := os.MkdirAll(globalArgs.Output, 0755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Mux audio/video tracks + muxer := processing.NewAudioVideoMuxer(logger) + if _, err := muxer.MuxAudioVideoTracks(&processing.AudioVideoMuxerConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + UserID: userID, + SessionID: sessionID, + TrackID: trackID, + MediaType: media, + WithExtract: true, + WithCleanup: false, + }, metadata); err != nil { + return fmt.Errorf("failed to mux audio/video tracks: %w", err) + } + + logger.Info("Mux audio and video command completed successfully") + return nil +} diff --git a/pkg/cmd/raw-recording/process_all.go b/pkg/cmd/raw-recording/process_all.go new file mode 100644 index 0000000..192aead --- 
/dev/null +++ b/pkg/cmd/raw-recording/process_all.go @@ -0,0 +1,171 @@ +package rawrecording + +import ( + "fmt" + "os" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +func processAllCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "process-all", + Short: "Process audio, video, and mux (all-in-one)", + Long: heredoc.Doc(` + Process audio, video, and mux them into combined files (all-in-one workflow). + + Outputs multiple MKV files: individual audio/video tracks, muxed A/V, and mixed audio. + Gap filling and DTX fix are always enabled for seamless playback and proper A/V sync. + + Filters are mutually exclusive: you can only specify one of + --user-id, --session-id, or --track-id at a time. + + Output files: + individual_{callType}_{callId}_{userId}_{sessionId}_audio_only_{timestamp}.mkv + individual_{callType}_{callId}_{userId}_{sessionId}_video_only_{timestamp}.mkv + individual_{callType}_{callId}_{userId}_{sessionId}_audio_video_{timestamp}.mkv + composite_{callType}_{callId}_audio_{timestamp}.mkv + `), + Example: heredoc.Doc(` + # Process all tracks + $ stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./out + + # Process tracks for specific user + $ stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./out --user-id user123 + + # Process tracks for specific session + $ stream-cli video raw-recording process-all --input-file recording.tar.gz --output ./out --session-id session456 + `), + RunE: runProcessAll, + } + + fl := cmd.Flags() + fl.String(FlagUserID, "", DescUserID) + fl.String(FlagSessionID, "", DescSessionID) + fl.String(FlagTrackID, "", DescTrackID) + + // Register completions + _ = cmd.RegisterFlagCompletionFunc(FlagUserID, completeUserIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagSessionID, completeSessionIDs) + _ = cmd.RegisterFlagCompletionFunc(FlagTrackID, completeTrackIDs) + + return cmd +} 
+ +func runProcessAll(cmd *cobra.Command, args []string) error { + globalArgs, err := getGlobalArgs(cmd) + if err != nil { + return err + } + + // Validate global args (output is required for process-all) + if err := validateGlobalArgs(globalArgs, true); err != nil { + return err + } + + userID, _ := cmd.Flags().GetString(FlagUserID) + sessionID, _ := cmd.Flags().GetString(FlagSessionID) + trackID, _ := cmd.Flags().GetString(FlagTrackID) + + // Validate input arguments against actual recording data + metadata, err := validateInputArgs(globalArgs, userID, sessionID, trackID) + if err != nil { + return fmt.Errorf("validation error: %w", err) + } + + logger := setupLogger(globalArgs.Verbose) + logger.Info("Starting process-all command") + + // Print banner + cmd.Println("Process-all command (audio + video + mux) with hierarchical filtering:") + cmd.Printf(" Input file: %s\n", globalArgs.InputFile) + cmd.Printf(" Output directory: %s\n", globalArgs.Output) + cmd.Printf(" User ID filter: %s\n", userID) + cmd.Printf(" Session ID filter: %s\n", sessionID) + cmd.Printf(" Track ID filter: %s\n", trackID) + cmd.Println(" Gap filling: always enabled") + + if trackID != "" { + cmd.Printf(" -> Processing specific track '%s'\n", trackID) + } else if sessionID != "" { + cmd.Printf(" -> Processing all tracks for session '%s'\n", sessionID) + } else if userID != "" { + cmd.Printf(" -> Processing all tracks for user '%s'\n", userID) + } else { + cmd.Println(" -> Processing all tracks (no filters)") + } + + // Prepare working directory + workDir, cleanup, err := prepareWorkDir(globalArgs, logger) + if err != nil { + return err + } + defer cleanup() + globalArgs.WorkDir = workDir + + // Create output directory if it doesn't exist + if err := os.MkdirAll(globalArgs.Output, 0755); err != nil { + return fmt.Errorf("failed to create output directory: %w", err) + } + + // Extract audio tracks + extractor := processing.NewTrackExtractor(logger) + if _, err := 
extractor.ExtractTracks(&processing.TrackExtractorConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + UserID: "", + SessionID: "", + TrackID: "", + TrackKind: TrackTypeAudio, + MediaType: "both", + FillGap: true, + FillDtx: true, + }, metadata); err != nil { + return fmt.Errorf("failed to extract audio tracks: %w", err) + } + + // Extract video tracks + if _, err := extractor.ExtractTracks(&processing.TrackExtractorConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + UserID: "", + SessionID: "", + TrackID: "", + TrackKind: TrackTypeVideo, + MediaType: "both", + FillGap: true, + FillDtx: true, + }, metadata); err != nil { + return fmt.Errorf("failed to extract video tracks: %w", err) + } + + // Mix all audio tracks + mixer := processing.NewAudioMixer(logger) + _, _ = mixer.MixAllAudioTracks(&processing.AudioMixerConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + WithScreenshare: false, + WithExtract: false, + WithCleanup: false, + }, metadata) + + // Mux audio/video tracks + muxer := processing.NewAudioVideoMuxer(logger) + if _, err := muxer.MuxAudioVideoTracks(&processing.AudioVideoMuxerConfig{ + WorkDir: globalArgs.WorkDir, + OutputDir: globalArgs.Output, + UserID: "", + SessionID: "", + TrackID: "", + MediaType: "", + WithExtract: false, + WithCleanup: false, + }, metadata); err != nil { + return fmt.Errorf("failed to mux audio/video tracks: %w", err) + } + + logger.Info("Process-all command completed successfully") + return nil +} diff --git a/pkg/cmd/raw-recording/processing/archive_input.go b/pkg/cmd/raw-recording/processing/archive_input.go new file mode 100644 index 0000000..b2a0369 --- /dev/null +++ b/pkg/cmd/raw-recording/processing/archive_input.go @@ -0,0 +1,104 @@ +package processing + +import ( + "archive/tar" + "compress/gzip" + "fmt" + "io" + "os" + "path/filepath" + "strings" +) + +// extractToTempDir extracts archive to temp directory or returns the directory path +// Returns: 
(workingDir, cleanupFunc, error) +func ExtractToTempDir(inputPath string, logger *ProcessingLogger) (string, func(), error) { + // If it's already a directory, just return it + if stat, err := os.Stat(inputPath); err == nil && stat.IsDir() { + logger.Debug("Input is already a directory: %s", inputPath) + return inputPath, func() {}, nil + } + + // If it's a tar.gz file, extract it to temp directory + if strings.HasSuffix(strings.ToLower(inputPath), ".tar.gz") { + logger.Info("Extracting tar.gz archive to temporary directory...") + + tempDir, err := os.MkdirTemp("", "raw-tools-*") + if err != nil { + return "", nil, fmt.Errorf("failed to create temp directory: %w", err) + } + + cleanup := func() { + _ = os.RemoveAll(tempDir) + } + + err = extractTarGzToDir(inputPath, tempDir, logger) + if err != nil { + cleanup() + return "", nil, fmt.Errorf("failed to extract tar.gz: %w", err) + } + + logger.Debug("Extracted archive to: %s", tempDir) + return tempDir, cleanup, nil + } + + return "", nil, fmt.Errorf("unsupported input format: %s (only tar.gz files and directories supported)", inputPath) +} + +// extractTarGzToDir extracts a tar.gz file to the specified directory +func extractTarGzToDir(tarGzPath, destDir string, logger *ProcessingLogger) error { + file, err := os.Open(tarGzPath) + if err != nil { + return fmt.Errorf("failed to open tar.gz file: %w", err) + } + defer func() { + _ = file.Close() + }() + + gzReader, err := gzip.NewReader(file) + if err != nil { + return fmt.Errorf("failed to create gzip reader: %w", err) + } + defer func() { + _ = gzReader.Close() + }() + + tarReader := tar.NewReader(gzReader) + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return fmt.Errorf("failed to read tar entry: %w", err) + } + + // Skip directories + if header.FileInfo().IsDir() { + continue + } + + // Create destination file + destPath := filepath.Join(destDir, header.Name) + + // Create directory structure if needed + if err := 
os.MkdirAll(filepath.Dir(destPath), 0755); err != nil { + return fmt.Errorf("failed to create directory structure: %w", err) + } + + // Extract file + outFile, err := os.Create(destPath) + if err != nil { + return fmt.Errorf("failed to create file %s: %w", destPath, err) + } + + _, err = io.Copy(outFile, tarReader) + _ = outFile.Close() + if err != nil { + return fmt.Errorf("failed to extract file %s: %w", destPath, err) + } + } + + return nil +} diff --git a/pkg/cmd/raw-recording/processing/archive_json.go b/pkg/cmd/raw-recording/processing/archive_json.go new file mode 100644 index 0000000..3718890 --- /dev/null +++ b/pkg/cmd/raw-recording/processing/archive_json.go @@ -0,0 +1,44 @@ +package processing + +import "time" + +type SessionTimingMetadata struct { + CallType string `json:"call_type"` + CallID string `json:"call_id"` + CallSessionID string `json:"call_session_id"` + CallStartTime time.Time `json:"call_start_time"` + ParticipantID string `json:"participant_id"` + UserSessionID string `json:"user_session_id"` + Segments struct { + Audio []*SegmentMetadata `json:"audio"` + Video []*SegmentMetadata `json:"video"` + } `json:"segments"` +} + +type SegmentMetadata struct { + // Global information + BaseFilename string `json:"base_filename"` + + // Track information + Codec string `json:"codec"` + TrackID string `json:"track_id"` + TrackType string `json:"track_type"` + + // Packet timing information + FirstRtpRtpTimestamp uint32 `json:"first_rtp_rtp_timestamp"` + FirstRtpUnixTimestamp int64 `json:"first_rtp_unix_timestamp"` + LastRtpRtpTimestamp uint32 `json:"last_rtp_rtp_timestamp,omitempty"` + LastRtpUnixTimestamp int64 `json:"last_rtp_unix_timestamp,omitempty"` + FirstRtcpRtpTimestamp uint32 `json:"first_rtcp_rtp_timestamp,omitempty"` + FirstRtcpNtpTimestamp int64 `json:"first_rtcp_ntp_timestamp,omitempty"` + LastRtcpRtpTimestamp uint32 `json:"last_rtcp_rtp_timestamp,omitempty"` + LastRtcpNtpTimestamp int64 `json:"last_rtcp_ntp_timestamp,omitempty"` + + 
FirstKeyFrameOffsetMs *int64 `json:"first_key_frame_offset_ms,omitempty"` + MaxFrameDimension *SegmentFrameDimension `json:"max_frame_dimension,omitempty"` +} + +type SegmentFrameDimension struct { + Width uint32 `json:"width,omitempty"` + Height uint32 `json:"height,omitempty"` +} diff --git a/pkg/cmd/raw-recording/processing/archive_metadata.go b/pkg/cmd/raw-recording/processing/archive_metadata.go new file mode 100644 index 0000000..3d63a5e --- /dev/null +++ b/pkg/cmd/raw-recording/processing/archive_metadata.go @@ -0,0 +1,402 @@ +package processing + +import ( + "archive/tar" + "compress/gzip" + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" +) + +// TrackInfo represents a single track with its metadata (deduplicated across segments) +type TrackInfo struct { + CallType string `json:"callType"` // call_type from timing metadata + CallID string `json:"callId"` // call_id from timing metadata + CallSessionID string `json:"callSessionId"` // call_session_id from timing metadata + CallStartTime time.Time `json:"callStartTime"` // call_start_time from timing metadata + UserID string `json:"userId"` // participant_id from timing metadata + SessionID string `json:"sessionId"` // user_session_id from timing metadata + TrackID string `json:"trackId"` // track_id from segment + TrackType string `json:"trackType"` // track_type from segment + TrackKind string `json:"trackKind"` // "audio" or "video" (cleaned from TRACK_TYPE_*) + IsScreenshare bool `json:"isScreenshare"` // true if this is a screenshare track + Codec string `json:"codec"` // codec info + SegmentCount int `json:"segmentCount"` // number of segments for this track + TrackStartTime time.Time `json:"trackStartTime"` // first_rtp_unix_timestamp from segment + TrackEndTime time.Time `json:"trackEndTime"` // last_rtp_unix_timestamp from segment + Segments []*SegmentInfo `json:"segments"` // list of filenames (for JSON output only) + + ConcatenatedTrackFileInfo *TrackFileInfo 
+} + +type SegmentInfo struct { + metadata *SegmentMetadata + + RtpDumpPath string + SdpPath string + ContainerPath string + ContainerExt string +} + +type TrackFileInfo struct { + Name string + StartAt time.Time + EndAt time.Time + MaxFrameDimension SegmentFrameDimension + AudioTrack *TrackInfo + VideoTrack *TrackInfo +} + +// RecordingMetadata contains all tracks and session information +type RecordingMetadata struct { + Tracks []*TrackInfo `json:"tracks"` + UserIDs []string `json:"userIds"` + Sessions []string `json:"sessions"` +} + +// MetadataParser handles parsing of raw recording files +type MetadataParser struct { + logger *ProcessingLogger +} + +// NewMetadataParser creates a new metadata parser +func NewMetadataParser(logger *ProcessingLogger) *MetadataParser { + return &MetadataParser{ + logger: logger, + } +} + +// ParseMetadataOnly efficiently extracts only metadata from archives (optimized for list-tracks) +// This is much faster than full extraction when you only need timing metadata +func (p *MetadataParser) ParseMetadataOnly(inputPath string) (*RecordingMetadata, error) { + // If it's already a directory, use the normal path + if stat, err := os.Stat(inputPath); err == nil && stat.IsDir() { + return p.parseDirectory(inputPath) + } + + // If it's a tar.gz file, use selective extraction (much faster) + if strings.HasSuffix(strings.ToLower(inputPath), ".tar.gz") { + return p.parseMetadataOnlyFromTarGz(inputPath) + } + + return nil, fmt.Errorf("unsupported input format: %s (only tar.gz files and directories supported)", inputPath) +} + +// parseDirectory processes a directory containing recording files +func (p *MetadataParser) parseDirectory(dirPath string) (*RecordingMetadata, error) { + metadata := &RecordingMetadata{ + Tracks: make([]*TrackInfo, 0), + UserIDs: make([]string, 0), + Sessions: make([]string, 0), + } + + // Find and process timing metadata files + err := filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error { + if 
err != nil { + return err + } + + if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), "_timing_metadata.json") { + p.logger.Debugf("Processing metadata file: %s", path) + + data, err := os.ReadFile(path) + if err != nil { + p.logger.Warnf("Failed to read metadata file %s: %v", path, err) + return nil + } + + tracks, err := p.parseTimingMetadataFile(data) + if err != nil { + p.logger.Warnf("Failed to parse metadata file %s: %v", path, err) + return nil + } + + metadata.Tracks = append(metadata.Tracks, tracks...) + } + + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to process directory: %w", err) + } + + // Build unique lists + metadata.UserIDs = p.extractUniqueUserIDs(metadata.Tracks) + metadata.Sessions = p.extractUniqueSessions(metadata.Tracks) + + return metadata, nil +} + +// parseMetadataOnlyFromTarGz efficiently extracts only timing metadata from tar.gz files +// This is optimized for list-tracks - only reads JSON files, skips all .rtpdump/.sdp files +func (p *MetadataParser) parseMetadataOnlyFromTarGz(tarGzPath string) (*RecordingMetadata, error) { + p.logger.Debugf("Reading metadata directly from tar.gz (efficient mode): %s", tarGzPath) + + file, err := os.Open(tarGzPath) + if err != nil { + return nil, fmt.Errorf("failed to open tar.gz file: %w", err) + } + defer func() { + _ = file.Close() + }() + + gzReader, err := gzip.NewReader(file) + if err != nil { + return nil, fmt.Errorf("failed to create gzip reader: %w", err) + } + defer func() { + _ = gzReader.Close() + }() + + tarReader := tar.NewReader(gzReader) + + metadata := &RecordingMetadata{ + Tracks: make([]*TrackInfo, 0), + UserIDs: make([]string, 0), + Sessions: make([]string, 0), + } + + filesRead := 0 + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } else if err != nil { + return nil, fmt.Errorf("failed to read tar entry: %w", err) + } else if header.FileInfo().IsDir() { + continue + } + + // Only process timing metadata JSON files (skip 
all .rtpdump/.sdp files) + if strings.HasSuffix(strings.ToLower(header.Name), "_timing_metadata.json") { + p.logger.Debugf("Processing metadata file: %s", header.Name) + + data, err := io.ReadAll(tarReader) + if err != nil { + p.logger.Warnf("Failed to read metadata file %s: %v", header.Name, err) + continue + } + + tracks, err := p.parseTimingMetadataFile(data) + if err != nil { + p.logger.Warnf("Failed to parse metadata file %s: %v", header.Name, err) + continue + } + + metadata.Tracks = append(metadata.Tracks, tracks...) + filesRead++ + } + // Skip all other files (.rtpdump, .sdp, etc.) - huge efficiency gain! + } + + p.logger.Debugf("Efficiently read %d metadata files from archive (skipped all media data files)", filesRead) + + // Extract unique user IDs and sessions + metadata.UserIDs = p.extractUniqueUserIDs(metadata.Tracks) + metadata.Sessions = p.extractUniqueSessions(metadata.Tracks) + + return metadata, nil +} + +// parseTimingMetadataFile parses a timing metadata JSON file and extracts tracks +func (p *MetadataParser) parseTimingMetadataFile(data []byte) ([]*TrackInfo, error) { + var sessionMetadata SessionTimingMetadata + err := json.Unmarshal(data, &sessionMetadata) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal session metadata: %w", err) + } + + // Use a map to deduplicate tracks by unique key + trackMap := make(map[string]*TrackInfo) + + processSegment := func(segment *SegmentMetadata, trackKind string) { + key := fmt.Sprintf("%s|%s|%s|%s", + sessionMetadata.ParticipantID, + sessionMetadata.UserSessionID, + segment.TrackID, + trackKind) + + if existingTrack, exists := trackMap[key]; exists { + existingTrack.Segments = append(existingTrack.Segments, &SegmentInfo{metadata: segment}) + existingTrack.SegmentCount++ + + ts, te := time.UnixMilli(segment.FirstRtpUnixTimestamp), time.UnixMilli(segment.LastRtpUnixTimestamp) + if ts.Before(existingTrack.TrackStartTime) { + existingTrack.TrackStartTime = ts + } + if 
te.After(existingTrack.TrackEndTime) { + existingTrack.TrackEndTime = te + } + } else { + // Create new track + track := &TrackInfo{ + CallType: sessionMetadata.CallType, + CallID: sessionMetadata.CallID, + CallSessionID: sessionMetadata.CallSessionID, + CallStartTime: sessionMetadata.CallStartTime, + UserID: sessionMetadata.ParticipantID, + SessionID: sessionMetadata.UserSessionID, + TrackID: segment.TrackID, + TrackType: segment.TrackType, + TrackKind: p.cleanTrackType(segment.TrackType), + IsScreenshare: p.isScreenshareTrack(segment.TrackType), + Codec: segment.Codec, + SegmentCount: 1, + TrackStartTime: time.UnixMilli(segment.FirstRtpUnixTimestamp), + TrackEndTime: time.UnixMilli(segment.LastRtpUnixTimestamp), + Segments: []*SegmentInfo{{metadata: segment}}, + } + trackMap[key] = track + } + } + + // Process audio segments + for _, segment := range sessionMetadata.Segments.Audio { + processSegment(segment, p.cleanTrackType(segment.TrackType)) + } + + // Process video segments + for _, segment := range sessionMetadata.Segments.Video { + processSegment(segment, p.cleanTrackType(segment.TrackType)) + } + + // Convert map to slice + tracks := make([]*TrackInfo, 0, len(trackMap)) + for _, track := range trackMap { + sort.Slice(track.Segments, func(i, j int) bool { + return track.Segments[i].metadata.FirstRtpUnixTimestamp < track.Segments[j].metadata.FirstRtpUnixTimestamp + }) + tracks = append(tracks, track) + } + + return tracks, nil +} + +// isScreenshareTrack detects if a track is screenshare-related +func (p *MetadataParser) isScreenshareTrack(trackType string) bool { + return trackType == "TRACK_TYPE_SCREEN_SHARE_AUDIO" || trackType == "TRACK_TYPE_SCREEN_SHARE" +} + +// cleanTrackType converts TRACK_TYPE_* to simple "audio" or "video" +func (p *MetadataParser) cleanTrackType(trackType string) string { + switch trackType { + case "TRACK_TYPE_AUDIO", "TRACK_TYPE_SCREEN_SHARE_AUDIO": + return trackKindAudio + case "TRACK_TYPE_VIDEO", "TRACK_TYPE_SCREEN_SHARE": + 
return trackKindVideo + default: + return strings.ToLower(trackType) + } +} + +// extractUniqueUserIDs returns a sorted list of unique user IDs +func (p *MetadataParser) extractUniqueUserIDs(tracks []*TrackInfo) []string { + userIDMap := make(map[string]bool) + for _, track := range tracks { + userIDMap[track.UserID] = true + } + + userIDs := make([]string, 0, len(userIDMap)) + for userID := range userIDMap { + userIDs = append(userIDs, userID) + } + + return userIDs +} + +// NOTE: ExtractTrackFiles and extractTrackFromTarGz removed - no longer needed since we always work with directories + +// extractUniqueSessions returns a sorted list of unique session IDs +func (p *MetadataParser) extractUniqueSessions(tracks []*TrackInfo) []string { + sessionMap := make(map[string]bool) + for _, track := range tracks { + sessionMap[track.SessionID] = true + } + + sessions := make([]string, 0, len(sessionMap)) + for session := range sessionMap { + sessions = append(sessions, session) + } + + return sessions +} + +// FilterTracks filters tracks based on mutually exclusive criteria +// Only one filter (userID, sessionID, or trackID) can be specified at a time +// Empty values are ignored, specific values must match +// If all are empty, all tracks are returned +func FilterTracks(tracks []*TrackInfo, userID, sessionID, trackID, trackKind, mediaType string) []*TrackInfo { + filtered := make([]*TrackInfo, 0) + + for _, track := range tracks { + if trackKind != "" && track.TrackKind != trackKind { + continue // Skip tracks with wrong trackKind + } + + // Apply media type filtering if specified + if mediaType != "" && mediaType != mediaTypeBoth { + if mediaType == mediaTypeUser && track.IsScreenshare { + continue // Skip display tracks when only user requested + } + if mediaType == mediaTypeDisplay && !track.IsScreenshare { + continue // Skip user tracks when only display requested + } + } + + // Apply the single specified filter (mutually exclusive) + if trackID != "" { + // Filter 
by trackID - return only that specific track + if track.TrackID == trackID { + filtered = append(filtered, track) + } + } else if sessionID != "" { + // Filter by sessionID - return all tracks for that session + if track.SessionID == sessionID { + filtered = append(filtered, track) + } + } else if userID != "" { + // Filter by userID - return all tracks for that user + if track.UserID == userID { + filtered = append(filtered, track) + } + } else { + // No filters specified - return all tracks + filtered = append(filtered, track) + } + } + + return filtered +} + +func firstPacketNtpTimestamp(segment *SegmentMetadata) int64 { + if segment.FirstRtcpNtpTimestamp != 0 && segment.FirstRtcpRtpTimestamp != 0 { + rtpNtpTs := (segment.FirstRtcpRtpTimestamp - segment.FirstRtpRtpTimestamp) / sampleRate(segment) + return segment.FirstRtcpNtpTimestamp - int64(rtpNtpTs) + } else { + return segment.FirstRtpUnixTimestamp + } +} + +func lastPacketNtpTimestamp(segment *SegmentMetadata) int64 { + if segment.LastRtcpNtpTimestamp != 0 && segment.LastRtcpRtpTimestamp != 0 { + rtpNtpTs := (segment.LastRtpRtpTimestamp - segment.LastRtcpRtpTimestamp) / sampleRate(segment) + return segment.LastRtcpNtpTimestamp + int64(rtpNtpTs) + } else { + return segment.LastRtpUnixTimestamp + } +} + +func sampleRate(segment *SegmentMetadata) uint32 { + switch segment.TrackType { + case "TRACK_TYPE_AUDIO", + "TRACK_TYPE_SCREEN_SHARE_AUDIO": + return 48 + default: + return 90 + } +} diff --git a/pkg/cmd/raw-recording/processing/audio_mixer.go b/pkg/cmd/raw-recording/processing/audio_mixer.go new file mode 100644 index 0000000..f502b92 --- /dev/null +++ b/pkg/cmd/raw-recording/processing/audio_mixer.go @@ -0,0 +1,144 @@ +package processing + +import ( + "fmt" + "os" + "path/filepath" + "slices" +) + +const ( + FormatMp3 = "mp3" + FormatWeba = "weba" + FormatWebm = "webm" + FormatMka = "mka" + FormatMkv = "mkv" + DefaultFormat = FormatMkv +) + +var supportedFormats = [5]string{FormatMp3, FormatWeba, 
FormatWebm, FormatMka, FormatMkv} + +type AudioMixerConfig struct { + WorkDir string + OutputDir string + Format string + WithScreenshare bool + + WithExtract bool + WithCleanup bool +} + +type AudioMixer struct { + logger *ProcessingLogger +} + +func NewAudioMixer(logger *ProcessingLogger) *AudioMixer { + return &AudioMixer{logger: logger} +} + +// MixAllAudioTracks orchestrates the entire audio mixing workflow using existing extraction logic +func (p *AudioMixer) MixAllAudioTracks(config *AudioMixerConfig, metadata *RecordingMetadata) (*string, error) { + p.overrideConfig(config) + + // Step 1: Extract all matching audio tracks using existing ExtractTracks function + p.logger.Info("Extracting all matching audio tracks...") + + if config.WithExtract { + mediaType := "" + if !config.WithScreenshare { + mediaType = "user" + } + + cfg := &TrackExtractorConfig{ + WorkDir: config.WorkDir, + OutputDir: config.OutputDir, + UserID: "", + SessionID: "", + TrackID: "", + TrackKind: trackKindAudio, + MediaType: mediaType, + FillDtx: true, + FillGap: true, + + Cleanup: config.WithCleanup, + } + + extractor := NewTrackExtractor(p.logger) + if _, err := extractor.ExtractTracks(cfg, metadata); err != nil { + return nil, fmt.Errorf("failed to extract audio tracks: %w", err) + } + } + + fileOffsets := p.offset(metadata, config.WithScreenshare) + if len(fileOffsets) == 0 { + p.logger.Warn("No audio tracks found") + return nil, nil + } + + p.logger.Info("Found %d extracted audio files to mix", len(fileOffsets)) + + //// Clean up individual audio files (optional) + if config.WithCleanup { + defer func(offsets *[]*FileOffset) { + for _, fileOffset := range *offsets { + p.logger.Info("Cleaning up temporary file: %s", fileOffset.Name) + if err := os.Remove(fileOffset.Name); err != nil { + p.logger.Warn("Failed to clean up temporary file %s: %v", fileOffset.Name, err) + } + } + }(&fileOffsets) + } + + // Step 3: Mix all discovered audio files using existing webm.mixAudioFiles + 
outputFile := p.buildFilename(config, metadata) + + err := runFFmpegCommand(generateMixAudioFilesArguments(outputFile, config.Format, fileOffsets), p.logger) + if err != nil { + return nil, fmt.Errorf("failed to mix audio files: %w", err) + } + + p.logger.Info("Successfully created mixed audio file: %s", outputFile) + + return &outputFile, nil +} + +func (p *AudioMixer) overrideConfig(config *AudioMixerConfig) { + if !slices.Contains(supportedFormats[:], config.Format) { + p.logger.Warn("Audio format %s not supported, fallback to default %s", config.Format, DefaultFormat) + config.Format = DefaultFormat + } +} + +func (p *AudioMixer) offset(metadata *RecordingMetadata, withScreenshare bool) []*FileOffset { + var offsets []*FileOffset + var firstTrack *TrackInfo + for _, t := range metadata.Tracks { + if t.TrackKind == trackKindAudio && (!t.IsScreenshare || withScreenshare) { + if firstTrack == nil { + firstTrack = t + offsets = append(offsets, &FileOffset{ + Name: t.ConcatenatedTrackFileInfo.Name, + Offset: 0, // Will be sorted later and rearranged + }) + } else { + offset, err := calculateSyncOffsetFromFiles(t, firstTrack) + if err != nil { + p.logger.Warn("Failed to calculate sync offset for audio tracks: %v", err) + continue + } + + offsets = append(offsets, &FileOffset{ + Name: t.ConcatenatedTrackFileInfo.Name, + Offset: offset, + }) + } + } + } + + return offsets +} + +func (p *AudioMixer) buildFilename(config *AudioMixerConfig, metadata *RecordingMetadata) string { + tr := metadata.Tracks[0] + return filepath.Join(config.OutputDir, fmt.Sprintf("composite_%s_%s_%s_%d.%s", tr.CallType, tr.CallID, trackKindAudio, tr.CallStartTime.UTC().UnixMilli(), config.Format)) +} diff --git a/pkg/cmd/raw-recording/processing/audio_video_muxer.go b/pkg/cmd/raw-recording/processing/audio_video_muxer.go new file mode 100644 index 0000000..8365bbe --- /dev/null +++ b/pkg/cmd/raw-recording/processing/audio_video_muxer.go @@ -0,0 +1,187 @@ +package processing + +import ( + "fmt" + 
"os" + "path/filepath" + "time" +) + +type AudioVideoMuxerConfig struct { + WorkDir string + OutputDir string + UserID string + SessionID string + TrackID string + MediaType string + + WithExtract bool + WithCleanup bool +} + +type AudioVideoMuxer struct { + logger *ProcessingLogger +} + +func NewAudioVideoMuxer(logger *ProcessingLogger) *AudioVideoMuxer { + return &AudioVideoMuxer{logger: logger} +} + +func (p *AudioVideoMuxer) MuxAudioVideoTracks(config *AudioVideoMuxerConfig, metadata *RecordingMetadata) ([]*TrackFileInfo, error) { + if config.WithExtract { + cfg := &TrackExtractorConfig{ + WorkDir: config.WorkDir, + OutputDir: config.OutputDir, + UserID: config.UserID, + SessionID: config.SessionID, + TrackID: config.TrackID, + TrackKind: "", + MediaType: config.MediaType, + FillGap: true, + FillDtx: true, + + Cleanup: config.WithCleanup, + } + + extractor := NewTrackExtractor(p.logger) + + // Extract tracks with gap filling enabled + p.logger.Infof("Extracting tracks with gap filling...") + _, err := extractor.ExtractTracks(cfg, metadata) + if err != nil { + return nil, fmt.Errorf("failed to extract audio tracks: %w", err) + } + } + + var infos []*TrackFileInfo // Group files by media type for proper pairing + pairedTracks := p.groupFilesByMediaType(config, metadata) + + for audioTrack, videoTrack := range pairedTracks { + // logger.Infof("Muxing %d user audio/video pairs", len(userAudio)) + info, err := p.muxTrackPairs(audioTrack, videoTrack, config) + if err != nil { + p.logger.Errorf("Failed to mux user tracks: %v", err) + } + infos = append(infos, info) + } + + return infos, nil +} + +// calculateSyncOffsetFromFiles calculates sync offset between audio and video files using metadata +func calculateSyncOffsetFromFiles(audioTrack, videoTrack *TrackInfo) (int64, error) { + // Calculate offset: positive means video starts before audio + audioOffset, videoOffset := int64(0), int64(0) + if audioTrack.Segments[0].metadata.FirstKeyFrameOffsetMs != nil { + 
audioOffset = *audioTrack.Segments[0].metadata.FirstKeyFrameOffsetMs + } + if videoTrack.Segments[0].metadata.FirstKeyFrameOffsetMs != nil { + videoOffset = *videoTrack.Segments[0].metadata.FirstKeyFrameOffsetMs + } + + audioTs := audioOffset + firstPacketNtpTimestamp(audioTrack.Segments[0].metadata) + videoTs := videoOffset + firstPacketNtpTimestamp(videoTrack.Segments[0].metadata) + offset := audioTs - videoTs + + return offset, nil +} + +// groupFilesByMediaType groups audio and video files by media type (user vs display) +func (p *AudioVideoMuxer) groupFilesByMediaType(config *AudioVideoMuxerConfig, metadata *RecordingMetadata) map[*TrackInfo]*TrackInfo { + pairedTracks := make(map[*TrackInfo]*TrackInfo) + + matches := func(audio *TrackInfo, video *TrackInfo) bool { + return audio.UserID == video.UserID && + audio.SessionID == video.SessionID && + audio.IsScreenshare == video.IsScreenshare + } + + filteredTracks := FilterTracks(metadata.Tracks, config.UserID, config.SessionID, config.TrackID, "", config.MediaType) + for _, at := range filteredTracks { + if at.TrackKind == trackKindAudio { + for _, vt := range filteredTracks { + if vt.TrackKind == trackKindVideo && matches(at, vt) { + pairedTracks[at] = vt + break + } + } + } + } + + return pairedTracks +} + +// muxTrackPairs muxes audio/video pairs of the same media type +func (p *AudioVideoMuxer) muxTrackPairs(audio, video *TrackInfo, config *AudioVideoMuxerConfig) (*TrackFileInfo, error) { + // Calculate sync offset using segment timing information + offset, err := calculateSyncOffsetFromFiles(audio, video) + if err != nil { + p.logger.Warnf("Failed to calculate sync offset, using 0: %v", err) + offset = 0 + } + + // Generate output filename with media type indicator + outputFile := p.buildFilename(config.OutputDir, video) + + audioFile := audio.ConcatenatedTrackFileInfo.Name + videoFile := video.ConcatenatedTrackFileInfo.Name + + // Mux the audio and video files + p.logger.Debugf("Muxing %s + %s → %s 
(offset: %dms)", + filepath.Base(audioFile), filepath.Base(videoFile), filepath.Base(outputFile), offset) + + err = runFFmpegCommand(generateMuxFilesArguments(outputFile, audioFile, videoFile, float64(offset)), p.logger) + if err != nil { + p.logger.Errorf("Failed to mux %s + %s: %v", audioFile, videoFile, err) + return nil, err + } + + p.logger.Infof("Successfully created muxed file: %s", outputFile) + + // Clean up individual track files to avoid clutter + if config.WithCleanup { + defer func() { + for _, file := range []string{audioFile, videoFile} { + p.logger.Infof("Cleaning up temporary file: %s", file) + if err := os.Remove(file); err != nil { + p.logger.Warnf("Failed to clean up temporary file %s: %v", file, err) + } + } + }() + } + + return &TrackFileInfo{ + Name: outputFile, + StartAt: p.getTime(audio.ConcatenatedTrackFileInfo.StartAt, video.ConcatenatedTrackFileInfo.StartAt, true), + EndAt: p.getTime(audio.ConcatenatedTrackFileInfo.EndAt, video.ConcatenatedTrackFileInfo.EndAt, false), + MaxFrameDimension: video.ConcatenatedTrackFileInfo.MaxFrameDimension, + AudioTrack: audio, + VideoTrack: video, + }, nil +} + +func (p *AudioVideoMuxer) getTime(d1, d2 time.Time, first bool) time.Time { + if d1.Before(d2) { + if first { + return d1 + } else { + return d2 + } + } else { + if first { + return d2 + } else { + return d1 + } + } +} + +// buildFilename creates output filename that indicates media type +func (p *AudioVideoMuxer) buildFilename(outputDir string, track *TrackInfo) string { + media := "audio_video" + if track.IsScreenshare { + media = "shared_" + media + } + + return filepath.Join(outputDir, fmt.Sprintf("individual_%s_%s_%s_%s_%s_%d.%s", track.CallType, track.CallID, track.UserID, track.SessionID, media, track.CallStartTime.UnixMilli(), track.Segments[0].ContainerExt)) +} diff --git a/pkg/cmd/raw-recording/processing/constants.go b/pkg/cmd/raw-recording/processing/constants.go new file mode 100644 index 0000000..0c66b01 --- /dev/null +++ 
b/pkg/cmd/raw-recording/processing/constants.go @@ -0,0 +1,16 @@ +package processing + +const ( + trackKindAudio = "audio" + trackKindVideo = "video" + + mediaTypeUser = "user" + mediaTypeDisplay = "display" + mediaTypeBoth = "both" + + suffixRtpDump = ".rtpdump" + suffixSdp = ".sdp" + + mkvExtension = "mkv" + mkvSuffix = "." + mkvExtension +) diff --git a/pkg/cmd/raw-recording/processing/container_converter.go b/pkg/cmd/raw-recording/processing/container_converter.go new file mode 100644 index 0000000..f3bcd51 --- /dev/null +++ b/pkg/cmd/raw-recording/processing/container_converter.go @@ -0,0 +1,353 @@ +package processing + +import ( + "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/pion/rtp" + "github.com/pion/rtp/codecs" + webrtc "github.com/pion/webrtc/v4" + "github.com/pion/webrtc/v4/pkg/media/rtpdump" + "github.com/pion/webrtc/v4/pkg/media/samplebuilder" +) + +const ( + audioMaxLate = 200 // 4sec + videoMaxLate = 1000 // 4sec +) + +type RTPDump2WebMConverter struct { + logger *ProcessingLogger + reader *rtpdump.Reader + recorder WebmRecorder + sampleBuilder *samplebuilder.SampleBuilder + + lastPkt *rtp.Packet + lastPktDuration uint32 + dtxInserted uint64 + + totalFrames int +} + +type WebmRecorder interface { + OnRTP(pkt *rtp.Packet) error + PushRtpBuf(payload []byte) error + Close() error +} + +func newRTPDump2WebMConverter(logger *ProcessingLogger) *RTPDump2WebMConverter { + return &RTPDump2WebMConverter{ + logger: logger, + } +} + +func ConvertDirectory(directory string, accept func(path string, info os.FileInfo) (*SegmentInfo, bool), fixDtx bool, logger *ProcessingLogger) error { + rtpdumpFiles := make(map[string]*SegmentInfo) + + // Walk through directory to find .rtpdump files + err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), suffixRtpDump) { + segment, accepted := 
accept(path, info) + if accepted { + rtpdumpFiles[path] = segment + } + } + + return nil + }) + if err != nil { + return err + } + + for rtpdumpFile := range rtpdumpFiles { + c := newRTPDump2WebMConverter(logger) + if err := c.ConvertFile(rtpdumpFile, fixDtx); err != nil { + c.logger.Errorf("Failed to convert %s: %v", rtpdumpFile, err) + continue + } + } + + return nil +} + +func (c *RTPDump2WebMConverter) ConvertFile(inputFile string, fixDtx bool) error { + c.logger.Debugf("Converting %s", inputFile) + + // Parse the RTP dump file + // Open the file + file, err := os.Open(inputFile) + if err != nil { + return fmt.Errorf("failed to open rtpdump file: %w", err) + } + defer func() { + _ = file.Close() + }() + + // Create standardized reader + reader, _, _ := rtpdump.NewReader(file) + c.reader = reader + + sdpContent, _ := readSDP(strings.Replace(inputFile, suffixRtpDump, suffixSdp, 1)) + mType, _ := mimeType(sdpContent) + _, suffix := outputFormatForMimeType(mType) + + switch mType { + case webrtc.MimeTypeAV1: + releasePacketHandler := samplebuilder.WithPacketReleaseHandler(c.buildDefaultReleasePacketHandler()) + c.sampleBuilder = samplebuilder.New(videoMaxLate, &codecs.AV1Depacketizer{}, 90000, releasePacketHandler) + c.recorder, err = NewGstreamerConverter(strings.Replace(inputFile, suffixRtpDump, suffix, 1), sdpContent, c.logger) + case webrtc.MimeTypeVP9: + releasePacketHandler := samplebuilder.WithPacketReleaseHandler(c.buildDefaultReleasePacketHandler()) + c.sampleBuilder = samplebuilder.New(videoMaxLate, &codecs.VP9Packet{}, 90000, releasePacketHandler) + c.recorder, err = NewGstreamerConverter(strings.Replace(inputFile, suffixRtpDump, suffix, 1), sdpContent, c.logger) + case webrtc.MimeTypeH264: + releasePacketHandler := samplebuilder.WithPacketReleaseHandler(c.buildDefaultReleasePacketHandler()) + c.sampleBuilder = samplebuilder.New(videoMaxLate, &codecs.H264Packet{}, 90000, releasePacketHandler) + c.recorder, err = 
NewGstreamerConverter(strings.Replace(inputFile, suffixRtpDump, suffix, 1), sdpContent, c.logger) + case webrtc.MimeTypeVP8: + releasePacketHandler := samplebuilder.WithPacketReleaseHandler(c.buildDefaultReleasePacketHandler()) + c.sampleBuilder = samplebuilder.New(videoMaxLate, &codecs.VP8Packet{}, 90000, releasePacketHandler) + c.recorder, err = NewGstreamerConverter(strings.Replace(inputFile, suffixRtpDump, suffix, 1), sdpContent, c.logger) + case webrtc.MimeTypeOpus: + releasePacketHandler := samplebuilder.WithPacketReleaseHandler(c.buildOpusReleasePacketHandler(fixDtx)) + c.sampleBuilder = samplebuilder.New(audioMaxLate, &codecs.OpusPacket{}, 48000, releasePacketHandler) + c.recorder, err = NewGstreamerConverter(strings.Replace(inputFile, suffixRtpDump, suffix, 1), sdpContent, c.logger) + default: + return fmt.Errorf("unsupported codec type: %s", mType) + } + if err != nil { + return fmt.Errorf("failed to create WebM recorder: %w", err) + } + defer func() { + _ = c.recorder.Close() + }() + + // Convert and feed RTP packets + return c.feedPackets(mType, reader) +} + +func (c *RTPDump2WebMConverter) feedPackets(mType string, reader *rtpdump.Reader) error { + startTime := time.Now() + + i := uint64(0) + for ; ; i++ { + packet, err := reader.Next() + if errors.Is(err, io.EOF) { + break + } else if err != nil { + return err + } else if packet.IsRTCP { + // _ = c.recorder.PushRtcpBuf(packet.Payload) + continue + } + + // Unmarshal the RTP packet from the raw payload + rtpPacket := &rtp.Packet{} + if err := rtpPacket.Unmarshal(packet.Payload); err != nil { + c.logger.Warnf("Failed to unmarshal RTP packet %d: %v", i, err) + continue + } + + // Push packet to samplebuilder for reordering + c.sampleBuilder.Push(rtpPacket) + + // Log progress + if i%10000 == 0 && i > 0 { + c.logger.Debugf("Processed %d packets", i) + } + } + + if c.sampleBuilder != nil { + c.sampleBuilder.Flush() + } + + duration := time.Since(startTime).Round(time.Millisecond) + + 
c.logger.Infof("Finished feeding %d packets (%d dtxInserted, %d real) (frames: %d total, codec: %s) in %v ", i+c.dtxInserted, c.dtxInserted, i, c.totalFrames, mType, duration) + + return nil +} + +func (c *RTPDump2WebMConverter) buildDefaultReleasePacketHandler() func(pkt *rtp.Packet) { + return func(pkt *rtp.Packet) { + if pkt.Marker { + c.totalFrames++ + } + + if c.lastPkt != nil { + if pkt.SequenceNumber-c.lastPkt.SequenceNumber > 1 { + c.logger.Infof("Missing Packet Detected, Previous SeqNum: %d RtpTs: %d - Last SeqNum: %d RtpTs: %d", c.lastPkt.SequenceNumber, c.lastPkt.Timestamp, pkt.SequenceNumber, pkt.Timestamp) + } + } + + c.lastPkt = pkt + + if e := c.recorder.OnRTP(pkt); e != nil { + c.logger.Warnf("Failed to record RTP packet SeqNum: %d RtpTs: %d: %v", pkt.SequenceNumber, pkt.Timestamp, e) + } + } +} + +func (c *RTPDump2WebMConverter) buildOpusReleasePacketHandler(fixDtx bool) func(pkt *rtp.Packet) { + return func(pkt *rtp.Packet) { + pkt.SequenceNumber += uint16(c.dtxInserted) + + if c.lastPkt != nil { + if pkt.SequenceNumber-c.lastPkt.SequenceNumber > 1 { + c.logger.Infof("Missing Packet Detected, Previous SeqNum: %d RtpTs: %d - Last SeqNum: %d RtpTs: %d", c.lastPkt.SequenceNumber, c.lastPkt.Timestamp, pkt.SequenceNumber, pkt.Timestamp) + } + + if fixDtx { + tsDiff := c.timestampDiff(pkt.Timestamp, c.lastPkt.Timestamp) + lastPktDuration := opusPacketDurationMs(c.lastPkt) + rtpDuration := uint32(lastPktDuration * 48) + + if rtpDuration == 0 { + rtpDuration = c.lastPktDuration + c.logger.Infof("LastPacket with no duration, Previous SeqNum: %d RtpTs: %d - Last SeqNum: %d RtpTs: %d", c.lastPkt.SequenceNumber, c.lastPkt.Timestamp, pkt.SequenceNumber, pkt.Timestamp) + } else { + c.lastPktDuration = rtpDuration + } + + if rtpDuration > 0 && tsDiff > rtpDuration { + + // Calculate how many packets we need to insert, taking care of packet losses + var toAdd uint16 + if uint32(c.sequenceNumberDiff(pkt.SequenceNumber, c.lastPkt.SequenceNumber))*rtpDuration != 
tsDiff { + toAdd = uint16(tsDiff/rtpDuration) - c.sequenceNumberDiff(pkt.SequenceNumber, c.lastPkt.SequenceNumber) + } + + c.logger.Debugf("Gap detected, inserting %d packets tsDiff %d, Previous SeqNum: %d RtpTs: %d - Last SeqNum: %d RtpTs: %d", + toAdd, tsDiff, c.lastPkt.SequenceNumber, c.lastPkt.Timestamp, pkt.SequenceNumber, pkt.Timestamp) + + for i := 1; i <= int(toAdd); i++ { + ins := c.lastPkt.Clone() + ins.Payload = ins.Payload[:1] // Keeping only TOC byte + ins.SequenceNumber += uint16(i) + ins.Timestamp += uint32(i) * rtpDuration + + c.logger.Debugf("Writing dtxInserted Packet %v", ins) + if e := c.recorder.OnRTP(ins); e != nil { + c.logger.Warnf("Failed to record dtxInserted RTP packet SeqNum: %d RtpTs: %d: %v", ins.SequenceNumber, ins.Timestamp, e) + } + } + + c.dtxInserted += uint64(toAdd) + pkt.SequenceNumber += toAdd + } + } + } + + c.lastPkt = pkt + + c.logger.Debugf("Writing real Packet Last SeqNum: %d RtpTs: %d", pkt.SequenceNumber, pkt.Timestamp) + if e := c.recorder.OnRTP(pkt); e != nil { + c.logger.Warnf("Failed to record RTP packet SeqNum: %d RtpTs: %d: %v", pkt.SequenceNumber, pkt.Timestamp, e) + } + } +} + +func getMaxFrameDimension(f1, f2 SegmentFrameDimension) SegmentFrameDimension { + if f1.Width*f1.Height > f2.Width*f2.Height { + return f1 + } + return f2 +} + +func opusPacketDurationMs(pkt *rtp.Packet) int { + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | config |s|1|1|0|p| M | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + payload := pkt.Payload + if len(payload) < 1 { + return 0 + } + + toc := payload[0] + config := (toc >> 3) & 0x1F + c := toc & 0x03 + + // Calculate frame duration according to OPUS RFC 6716 table (use x10 factor) + // Frame duration is determined by the config value + duration := opusFrameDurationFactor10(config) + frameDuration := float32(duration) / 10 + frameCount := opusFrameCount(c, payload) + + return int(frameDuration * float32(frameCount)) +} + +func 
opusFrameDurationFactor10(config byte) int { + switch { + case config < 3: + // SILK-only NB: 10, 20, 40 ms + return 100 * (1 << (config & 0x03)) + case config == 3: + // SILK-only NB: 60 ms + return 600 + case config < 7: + // SILK-only MB: 10, 20, 40 ms + return 100 * (1 << (config & 0x03)) + case config == 7: + // SILK-only MB: 60 ms + return 600 + case config < 11: + // SILK-only WB: 10, 20, 40 ms + return 100 * (1 << (config & 0x03)) + case config == 11: + // SILK-only WB: 60 ms + return 600 + case config <= 13: + // Hybrid SWB: 10, 20 ms + return 100 * (1 << (config & 0x01)) + case config <= 15: + // Hybrid FB: 10, 20 ms + return 100 * (1 << (config & 0x01)) + case config <= 19: + // CELT-only NB: 2.5, 5, 10, 20 ms + return 25 * (1 << (config & 0x03)) // 2.5ms * 10 for integer math + case config <= 23: + // CELT-only WB: 2.5, 5, 10, 20 ms + return 25 * (1 << (config & 0x03)) // 2.5ms * 10 for integer math + case config <= 27: + // CELT-only SWB: 2.5, 5, 10, 20 ms + return 25 * (1 << (config & 0x03)) // 2.5ms * 10 for integer math + case config <= 31: + // CELT-only FB: 2.5, 5, 10, 20 ms + return 25 * (1 << (config & 0x03)) // 2.5ms * 10 for integer math + default: + // MUST NOT HAPPEN + return 0 + } +} + +func opusFrameCount(c byte, payload []byte) int { + switch c { + case 0: + return 1 + case 1, 2: + return 2 + case 3: + if len(payload) > 1 { + return int(payload[1] & 0x3F) + } + } + return 0 +} + +func (c *RTPDump2WebMConverter) timestampDiff(pts, fts uint32) uint32 { + return pts - fts +} + +func (c *RTPDump2WebMConverter) sequenceNumberDiff(psq, fsq uint16) uint16 { + return psq - fsq +} diff --git a/pkg/cmd/raw-recording/processing/ffmpeg_helper.go b/pkg/cmd/raw-recording/processing/ffmpeg_helper.go new file mode 100644 index 0000000..96a064c --- /dev/null +++ b/pkg/cmd/raw-recording/processing/ffmpeg_helper.go @@ -0,0 +1,206 @@ +package processing + +import ( + "fmt" + "os/exec" + "sort" + "strings" + "time" +) + +type FileOffset struct { + Name string 
+ Offset int64 +} + +func generateConcatFileArguments(outputPath, concatPath string) ([]string, error) { + args := defaultArgs() + args = append(args, "-f", "concat") + args = append(args, "-safe", "0") + args = append(args, "-i", concatPath) + args = append(args, "-c", "copy") + args = append(args, "-y", outputPath) + return args, nil +} + +func generateMuxFilesArguments(fileName string, audioFile string, videoFile string, offsetMs float64) []string { + args := defaultArgs() + + // Apply offset using itsoffset + // If offset is positive (video ahead), delay audio + // If offset is negative (audio ahead), delay video + if offsetMs != 0 { + offsetSeconds := offsetMs / 1000.0 + + if offsetMs > 0 { + // Video is ahead, delay audio + args = append(args, "-itsoffset", fmt.Sprintf("%.3f", offsetSeconds)) + args = append(args, "-i", audioFile) + args = append(args, "-i", videoFile) + } else { + args = append(args, "-i", audioFile) + args = append(args, "-itsoffset", fmt.Sprintf("%.3f", -offsetSeconds)) + args = append(args, "-i", videoFile) + } + } else { + args = append(args, "-i", audioFile) + args = append(args, "-i", videoFile) + } + + args = append(args, "-map", "0:a") + args = append(args, "-map", "1:v") + args = append(args, "-c", "copy") + args = append(args, "-y", fileName) + return args +} + +func generateMixAudioFilesArguments(fileName, format string, files []*FileOffset) []string { + var filterParts []string + var mixParts []string + args := defaultArgs() + + sort.Slice(files, func(i, j int) bool { + return files[i].Offset < files[j].Offset + }) + + var offsetToAdd int64 + for i, fo := range files { + args = append(args, "-i", fo.Name) + + if len(files) > 1 { + if i == 0 { + offsetToAdd = -fo.Offset + } + offset := fo.Offset + offsetToAdd + + if offset > 0 { + // for stereo: offset|offset + label := fmt.Sprintf("a%d", i) + filterParts = append(filterParts, + fmt.Sprintf("[%d:a]adelay=%d|%d[%s]", i, offset, offset, label)) + mixParts = append(mixParts, 
fmt.Sprintf("[%s]", label))
+			} else {
+				mixParts = append(mixParts, fmt.Sprintf("[%d:a]", i))
+			}
+		}
+	}
+
+	if len(files) > 1 {
+		// Build amix filter
+		filter := strings.Join(filterParts, "; ")
+		if filter != "" {
+			filter += "; "
+		}
+		filter += strings.Join(mixParts, "") +
+			fmt.Sprintf("amix=inputs=%d:normalize=0", len(files))
+
+		args = append(args, "-filter_complex", filter)
+	}
+
+	audioLib := audioLibForExtension(format)
+	mkvAudioLib := audioLibForExtension(FormatMkv)
+	// Copy is enough in case of webm, weba, mka, mkv when len == 1
+	if audioLib != mkvAudioLib || len(files) > 1 {
+		// Reuse the audioLib already resolved above instead of calling
+		// audioLibForExtension(format) a second time.
+		args = append(args, "-c:a", audioLib)
+		args = append(args, "-b:a", "128k")
+	} else {
+		args = append(args, "-c", "copy")
+	}
+
+	if format == FormatWeba {
+		args = append(args, "-f", "webm")
+	}
+
+	args = append(args, "-y", fileName)
+
+	return args
+}
+
+// audioLibForExtension maps an output format name to the ffmpeg audio
+// encoder library; opus is the default for anything unrecognized.
+func audioLibForExtension(str string) string {
+	switch str {
+	case FormatMp3:
+		return "libmp3lame"
+	case FormatWeba, FormatWebm, FormatMkv, FormatMka:
+		return "libopus"
+	default:
+		return "libopus"
+	}
+}
+
+// generateSilenceArguments builds ffmpeg args producing `duration` seconds
+// of stereo 48kHz opus-encoded silence at fileName (used as gap filler).
+func generateSilenceArguments(fileName string, duration float64) []string {
+	args := defaultArgs()
+	args = append(args, "-f", "lavfi")
+	args = append(args, "-t", fmt.Sprintf("%.3f", duration))
+	args = append(args, "-i", "anullsrc=cl=stereo:r=48000")
+	args = append(args, "-c:a", "libopus")
+	args = append(args, "-b:a", "32k")
+	args = append(args, "-y", fileName)
+	return args
+}
+
+// generateBlackVideoArguments builds ffmpeg args producing `duration` seconds
+// of black video matching the given codec/size/framerate (used as gap filler).
+func generateBlackVideoArguments(fileName, mimeType string, duration float64, width, height, frameRate int) []string {
+	args := defaultArgs()
+	args = append(args, "-f", "lavfi")
+	args = append(args, "-t", fmt.Sprintf("%.3f", duration))
+	args = append(args, "-i", fmt.Sprintf("color=c=black:s=%dx%d:r=%d", width, height, frameRate))
+	args = append(args, "-c:v", videoLibForMimeType(mimeType))
+
+	if strings.ToLower(mimeType) == "video/h264" {
+		args = append(args,
"-preset", "ultrafast")
+	} else {
+		args = append(args, "-b:v", "0")
+		args = append(args, "-cpu-used", "8")
+	}
+
+	args = append(args, "-crf", "45")
+	args = append(args, "-y", fileName)
+	return args
+}
+
+// videoLibForMimeType maps a video mime type to the ffmpeg encoder library,
+// defaulting to libvpx for anything unrecognized.
+func videoLibForMimeType(str string) string {
+	switch strings.ToLower(str) {
+	case "video/vp8":
+		return "libvpx"
+	case "video/vp9":
+		return "libvpx-vp9"
+	case "video/h264":
+		return "libx264"
+	case "video/av1":
+		return "libaom-av1"
+	default:
+		return "libvpx"
+	}
+}
+
+// outputFormatForMimeType currently always targets Matroska regardless of
+// the codec (the str parameter is kept for future per-codec containers).
+func outputFormatForMimeType(str string) (extension, suffix string) {
+	extension = mkvExtension
+	suffix = mkvSuffix
+	return
+}
+
+// defaultArgs returns the ffmpeg flags shared by every invocation.
+func defaultArgs() []string {
+	var args []string
+	args = append(args, "-hide_banner")
+	args = append(args, "-threads", "1")
+	args = append(args, "-filter_threads", "1")
+	return args
+}
+
+// runFFmpegCommand runs ffmpeg synchronously with the given args, logs its
+// combined output, and returns a wrapped error if the process fails.
+func runFFmpegCommand(args []string, logger *ProcessingLogger) error {
+	startAt := time.Now()
+	cmd := exec.Command("ffmpeg", args...)
+
+	// Capture output for debugging
+	output, err := cmd.CombinedOutput()
+
+	// cmd.Process is nil when the binary could not be started at all
+	// (e.g. ffmpeg missing from PATH); dereferencing it would panic,
+	// so resolve the pid defensively before logging.
+	pid := -1
+	if cmd.Process != nil {
+		pid = cmd.Process.Pid
+	}
+
+	logger.Infof("FFmpeg process pid<%d> with args: %s", pid, args)
+	logger.Infof("FFmpeg process pid<%d> output:\n%s", pid, string(output))
+
+	if err != nil {
+		logger.Errorf("FFmpeg process pid<%d> failed: %v", pid, err)
+		return fmt.Errorf("FFmpeg process pid<%d> failed in %s: %w", pid, time.Since(startAt).Round(time.Millisecond), err)
+	}
+
+	logger.Infof("FFmpeg process pid<%d> ended successfully in %s", pid, time.Since(startAt).Round(time.Millisecond))
+	return nil
+}
diff --git a/pkg/cmd/raw-recording/processing/gstreamer_converter.go b/pkg/cmd/raw-recording/processing/gstreamer_converter.go
new file mode 100644
index 0000000..5d064e8
--- /dev/null
+++ b/pkg/cmd/raw-recording/processing/gstreamer_converter.go
@@ -0,0 +1,263 @@
+package processing
+
+import (
+	"context"
+	"encoding/binary"
+	"fmt"
+	"math/rand"
+	"net"
+	"os"
+	"os/exec"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+
+
"github.com/pion/rtp" +) + +type GstreamerConverter struct { + logger *ProcessingLogger + outputPath string + rtpConn net.Conn + gstreamerCmd *exec.Cmd + mu sync.Mutex + ctx context.Context + cancel context.CancelFunc + port int + startAt time.Time +} + +func NewGstreamerConverter(outputPath, sdpContent string, logger *ProcessingLogger) (*GstreamerConverter, error) { + ctx, cancel := context.WithCancel(context.Background()) + + r := &GstreamerConverter{ + logger: logger, + outputPath: outputPath, + ctx: ctx, + cancel: cancel, + } + + // Choose TCP listen port for GStreamer tcpserversrc + r.port = rand.Intn(10000) + 10000 + + // Start GStreamer with codec detection + if err := r.startGStreamer(sdpContent, outputPath); err != nil { + cancel() + return nil, err + } + + // Establish TCP client connection to the local tcpserversrc + if err := r.setupConnections(r.port); err != nil { + cancel() + return nil, err + } + + return r, nil +} + +func (r *GstreamerConverter) setupConnections(port int) error { + // Setup TCP connection with retry to match GStreamer tcpserversrc readiness + address := "127.0.0.1:" + strconv.Itoa(port) + deadline := time.Now().Add(10 * time.Second) + var conn net.Conn + var err error + for { + conn, err = net.DialTimeout("tcp", address, 500*time.Millisecond) + if err == nil { + break + } + if time.Now().After(deadline) { + return fmt.Errorf("failed to connect to tcpserversrc at %s: %w", address, err) + } + time.Sleep(50 * time.Millisecond) + } + r.rtpConn = conn + return nil +} + +func (r *GstreamerConverter) startGStreamer(sdpContent, outputFilePath string) error { + r.startAt = time.Now() + + // Start with common GStreamer arguments optimized for RTP dump replay + args, err := r.generateArgs(sdpContent, outputFilePath) + if err != nil { + return err + } + + r.gstreamerCmd = exec.Command("gst-launch-1.0", args...) 
+ // Redirect output for debugging + r.gstreamerCmd.Stdout = os.Stdout + r.gstreamerCmd.Stderr = os.Stderr + + // Start GStreamer process + if err := r.gstreamerCmd.Start(); err != nil { + return err + } + + r.logger.Infof("GStreamer process pid<%d> with pipeline: %s", r.gstreamerCmd.Process.Pid, strings.Join(args, " ")) + + return nil +} + +func (r *GstreamerConverter) generateArgs(sdpContent, outputFilePath string) ([]string, error) { + // Parse SDP to determine RTP caps for rtpstreamdepay + media, encodingName, payloadType, clockRate, err := parseRtpCapsFromSDP(sdpContent) + if err != nil { + return nil, err + } + + // Start with common GStreamer arguments optimized for RTP dump replay + args := []string{} + args = append(args, "-e") + // args = append(args, "--gst-debug-level=3") + // args = append(args, "--gst-debug=tcpserversrc:5,rtp*:5,webm*:5,identity:5,jitterbuffer:5,av1*:5") + // args = append(args, "--gst-debug-no-color") + args = append(args, "tcpserversrc", "host=127.0.0.1", fmt.Sprintf("port=%d", r.port), "!") + args = append(args, "application/x-rtp-stream", "!") + args = append(args, "rtpstreamdepay", "!") + args = append(args, fmt.Sprintf("application/x-rtp,media=%s,encoding-name=%s,clock-rate=%s,payload=%s", media, encodingName, clockRate, payloadType), "!") + + // Simplified approach for RTP dump replay: + // - rtpjitterbuffer: Basic packet reordering with minimal interference + // - mode=none: Don't override timing, let depayloaders handle it + // - latency=0: No artificial latency, process packets as they come + // - do-retransmission=false: No retransmission for dump replay + args = append(args, "rtpjitterbuffer", "mode=none", "latency=0", "do-lost=false", "do-retransmission=false", "drop-on-latency=false", "!") + + switch encodingName { + case "VP9", "AV1", "H264": + args = append(args, fmt.Sprintf("rtp%sdepay", strings.ToLower(encodingName)), "!") + args = append(args, fmt.Sprintf("%sparse", strings.ToLower(encodingName)), "!") + case 
"OPUS", "VP8": + args = append(args, fmt.Sprintf("rtp%sdepay", strings.ToLower(encodingName)), "!") + default: + return nil, fmt.Errorf("unsupported encoding: %s", encodingName) + } + + args = append(args, "matroskamux", "streamable=false", "!") + args = append(args, "filesink", fmt.Sprintf("location=%s", outputFilePath)) + + return args, nil +} + +func parseRtpCapsFromSDP(sdp string) (media string, encodingName string, payload string, clockRate string, err error) { + // Expect one m= line and one a=rtpmap line; return error if missing or malformed + mLineFound := false + rtpmapLineFound := false + for _, raw := range strings.Split(sdp, "\n") { + //line := strings.TrimSpace(raw) + lower := strings.ToLower(raw) + if strings.HasPrefix(lower, "m=") { + mLineFound = true + // Format: m= ... + fields := strings.Fields(lower) + if len(fields) >= 1 { + media = strings.TrimPrefix(fields[0], "m=") + } else { + err = fmt.Errorf("invalid m= line: %s", lower) + return + } + } else if strings.HasPrefix(lower, "a=rtpmap:") { + rtpmapLineFound = true + + // Format: a=rtpmap: /[/channels] + after := strings.TrimSpace(lower[len("a=rtpmap:"):]) + fields := strings.Fields(after) + if len(fields) >= 2 { + payload = fields[0] + codec := strings.ToUpper(fields[1]) + parts := strings.Split(codec, "/") + if len(parts) >= 2 { + encodingName = parts[0] + clockRate = parts[1] + } else { + err = fmt.Errorf("invalid a=rtpmap: %s", lower) + return + } + } else { + err = fmt.Errorf("invalid a=rtpmap: %s", lower) + return + } + } + } + + if !mLineFound || !rtpmapLineFound { + err = fmt.Errorf("invalid SDP m= or a=rtpmap lines not found: \n%s", sdp) + } + return +} + +func (r *GstreamerConverter) OnRTP(packet *rtp.Packet) error { + // Marshal RTP packet + buf, err := packet.Marshal() + if err != nil { + return err + } + + return r.PushRtpBuf(buf) +} + +func (r *GstreamerConverter) PushRtpBuf(buf []byte) error { + r.mu.Lock() + defer r.mu.Unlock() + + // Send RTP packet over TCP using RFC4571 
2-byte length prefix
+	if r.rtpConn != nil {
+		if len(buf) > 0xFFFF {
+			return fmt.Errorf("rtp packet too large for TCP framing: %d bytes", len(buf))
+		}
+		header := make([]byte, 2)
+		binary.BigEndian.PutUint16(header, uint16(len(buf)))
+		if _, err := r.rtpConn.Write(header); err != nil {
+			r.logger.Warnf("Failed to write RTP length header: %v", err)
+			return err
+		}
+		if _, err := r.rtpConn.Write(buf); err != nil {
+			r.logger.Warnf("Failed to write RTP packet: %v", err)
+			return err
+		}
+	}
+	return nil
+}
+
+// Close tears down the TCP feed and waits (bounded) for GStreamer to finalize
+// the output file, force-killing the process if it does not exit in time.
+func (r *GstreamerConverter) Close() error {
+	r.mu.Lock()
+	defer r.mu.Unlock()
+
+	// Guard the Pid dereference: Process is only non-nil after a
+	// successful Start, and Close must stay safe to call regardless.
+	if r.gstreamerCmd != nil && r.gstreamerCmd.Process != nil {
+		r.logger.Infof("GStreamer process pid<%d> Closing TCP connection and wait for termination...", r.gstreamerCmd.Process.Pid)
+	}
+
+	// Cancel context to stop background goroutines
+	if r.cancel != nil {
+		r.cancel()
+	}
+
+	// Close TCP connection
+	if r.rtpConn != nil {
+		_ = r.rtpConn.Close()
+		r.rtpConn = nil
+	}
+
+	// Gracefully wait for GStreamer termination
+	if r.gstreamerCmd != nil && r.gstreamerCmd.Process != nil {
+		// Wait for graceful exit with timeout
+		done := make(chan error, 1)
+		go func() {
+			done <- r.gstreamerCmd.Wait()
+		}()
+
+		select {
+		case <-time.After(5 * time.Second):
+			r.logger.Warnf("GStreamer process pid<%d> termination timeout in %s...", r.gstreamerCmd.Process.Pid, time.Since(r.startAt).Round(time.Millisecond))
+
+			// Timeout, force kill
+			if e := r.gstreamerCmd.Process.Kill(); e != nil {
+				r.logger.Errorf("GStreamer process pid<%d> errored while killing: %v", r.gstreamerCmd.Process.Pid, e)
+			}
+		case <-done:
+			r.logger.Infof("GStreamer process pid<%d> exited successfully in %s...", r.gstreamerCmd.Process.Pid, time.Since(r.startAt).Round(time.Millisecond))
+		}
+	}
+
+	return nil
+}
diff --git a/pkg/cmd/raw-recording/processing/logger_adapter.go b/pkg/cmd/raw-recording/processing/logger_adapter.go
new file mode 100644
index 0000000..ff5234f
--- /dev/null
+++ b/pkg/cmd/raw-recording/processing/logger_adapter.go
@@ -0,0 +1,68 @@
+package processing
+
+import (
+	"fmt"
+	"io"
+)
+
+// LogLevel represents the severity level of a log message
+type LogLevel int
+
+const (
+	LogLevelDebug LogLevel = iota
+	LogLevelInfo
+	LogLevelWarn
+	LogLevelError
+)
+
+// ProcessingLogger is a minimal leveled logger that writes "[LEVEL] message"
+// lines to an io.Writer, discarding messages below its configured level.
+type ProcessingLogger struct {
+	writer io.Writer // destination for formatted log lines
+	level  LogLevel  // minimum level that will be written
+}
+
+// NewProcessingLogger returns a logger writing to writer; messages with a
+// severity lower than level are dropped.
+func NewProcessingLogger(writer io.Writer, level LogLevel) *ProcessingLogger {
+	return &ProcessingLogger{
+		writer: writer,
+		level:  level,
+	}
+}
+
+// log formats and writes one message when level passes the threshold.
+// Write errors are deliberately ignored: logging is best-effort.
+func (l *ProcessingLogger) log(level LogLevel, prefix, format string, args ...interface{}) {
+	if level < l.level {
+		return
+	}
+	msg := fmt.Sprintf(format, args...)
+	_, _ = fmt.Fprintf(l.writer, "%s %s\n", prefix, msg)
+}
+
+// Debug logs a formatted message at debug level (alias of Debugf).
+func (l *ProcessingLogger) Debug(format string, args ...interface{}) {
+	l.log(LogLevelDebug, "[DEBUG]", format, args...)
+}
+
+// Debugf logs a formatted message at debug level.
+func (l *ProcessingLogger) Debugf(format string, args ...interface{}) {
+	l.log(LogLevelDebug, "[DEBUG]", format, args...)
+}
+
+// Info logs a formatted message at info level (alias of Infof).
+func (l *ProcessingLogger) Info(format string, args ...interface{}) {
+	l.log(LogLevelInfo, "[INFO]", format, args...)
+}
+
+// Infof logs a formatted message at info level.
+func (l *ProcessingLogger) Infof(format string, args ...interface{}) {
+	l.log(LogLevelInfo, "[INFO]", format, args...)
+}
+
+// Warn logs a formatted message at warn level (alias of Warnf).
+func (l *ProcessingLogger) Warn(format string, args ...interface{}) {
+	l.log(LogLevelWarn, "[WARN]", format, args...)
+}
+
+// Warnf logs a formatted message at warn level.
+func (l *ProcessingLogger) Warnf(format string, args ...interface{}) {
+	l.log(LogLevelWarn, "[WARN]", format, args...)
+}
+
+// Error logs a formatted message at error level (alias of Errorf).
+func (l *ProcessingLogger) Error(format string, args ...interface{}) {
+	l.log(LogLevelError, "[ERROR]", format, args...)
+}
+
+// Errorf logs a formatted message at error level.
+func (l *ProcessingLogger) Errorf(format string, args ...interface{}) {
+	l.log(LogLevelError, "[ERROR]", format, args...)
+} diff --git a/pkg/cmd/raw-recording/processing/sdp_tool.go b/pkg/cmd/raw-recording/processing/sdp_tool.go new file mode 100644 index 0000000..5e8b01d --- /dev/null +++ b/pkg/cmd/raw-recording/processing/sdp_tool.go @@ -0,0 +1,38 @@ +package processing + +import ( + "fmt" + "os" + "strings" + + webrtc "github.com/pion/webrtc/v4" +) + +func readSDP(sdpFilePath string) (string, error) { + content, err := os.ReadFile(sdpFilePath) + if err != nil { + return "", fmt.Errorf("failed to read SDP file %s: %w", sdpFilePath, err) + } + return string(content), nil +} + +func mimeType(sdp string) (string, error) { + upper := strings.ToUpper(sdp) + if strings.Contains(upper, "VP9") { + return webrtc.MimeTypeVP9, nil + } + if strings.Contains(upper, "VP8") { + return webrtc.MimeTypeVP8, nil + } + if strings.Contains(upper, "AV1") { + return webrtc.MimeTypeAV1, nil + } + if strings.Contains(upper, "OPUS") { + return webrtc.MimeTypeOpus, nil + } + if strings.Contains(upper, "H264") { + return webrtc.MimeTypeH264, nil + } + + return "", fmt.Errorf("mimeType should be OPUS, VP8, VP9, AV1, H264") +} diff --git a/pkg/cmd/raw-recording/processing/track_extractor.go b/pkg/cmd/raw-recording/processing/track_extractor.go new file mode 100644 index 0000000..5dcf547 --- /dev/null +++ b/pkg/cmd/raw-recording/processing/track_extractor.go @@ -0,0 +1,237 @@ +package processing + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "time" +) + +const blackVideoFps = 5 + +type TrackExtractorConfig struct { + WorkDir string + OutputDir string + UserID string + SessionID string + TrackID string + TrackKind string + MediaType string + FillGap bool + FillDtx bool + + Cleanup bool +} + +type TrackExtractor struct { + logger *ProcessingLogger +} + +func NewTrackExtractor(logger *ProcessingLogger) *TrackExtractor { + return &TrackExtractor{logger: logger} +} + +// Generic track extraction function that works for both audio and video +func (p *TrackExtractor) ExtractTracks(config 
*TrackExtractorConfig, metadata *RecordingMetadata) ([]*TrackFileInfo, error) { + // Filter tracks to specified type only and apply hierarchical filtering + filteredTracks := FilterTracks(metadata.Tracks, config.UserID, config.SessionID, config.TrackID, config.TrackKind, config.MediaType) + if len(filteredTracks) == 0 { + p.logger.Warnf("No %s tracks found matching the filter criteria", config.TrackKind) + return nil, nil + } + + p.logger.Infof("Found %d %s tracks to extract", len(filteredTracks), config.TrackKind) + + // Extract and convert each track + var infos []*TrackFileInfo + for i, track := range filteredTracks { + p.logger.Debugf("Processing %s track %d/%d: %s", track.TrackKind, i+1, len(filteredTracks), track.TrackID) + + info, err := p.extractSingleTrackWithOptions(config, track) + if err != nil { + p.logger.Errorf("Failed to extract %s track %s: %v", track.TrackKind, track.TrackID, err) + continue + } + if info != nil { + infos = append(infos, info) + } + } + + return infos, nil +} + +func (p *TrackExtractor) extractSingleTrackWithOptions(config *TrackExtractorConfig, track *TrackInfo) (*TrackFileInfo, error) { + accept := func(path string, info os.FileInfo) (*SegmentInfo, bool) { + for _, s := range track.Segments { + if strings.Contains(info.Name(), s.metadata.BaseFilename) { + extension, suffix := outputFormatForMimeType(track.Codec) + abs, _ := filepath.Abs(path) + + s.RtpDumpPath = abs + s.SdpPath = strings.ReplaceAll(abs, suffixRtpDump, suffixSdp) + s.ContainerExt = extension + s.ContainerPath = strings.ReplaceAll(abs, suffixRtpDump, suffix) + return s, true + } + } + return nil, false + } + + // Convert using the WebM converter + err := ConvertDirectory(config.WorkDir, accept, config.FillDtx, p.logger) + if err != nil { + return nil, fmt.Errorf("failed to convert %s track: %w", track.TrackKind, err) + } + + // Create segments with timing info and fill gaps + finalFileInfo, err := p.processSegmentsWithGapFilling(config, track) + if err != nil { + 
return nil, fmt.Errorf("failed to process segments with gap filling: %w", err) + } + + track.ConcatenatedTrackFileInfo = finalFileInfo + p.logger.Infof("Successfully extracted %s track to: %s", track.TrackKind, finalFileInfo.Name) + return finalFileInfo, nil +} + +// processSegmentsWithGapFilling processes webm segments, fills gaps if requested, and concatenates into final file +func (p *TrackExtractor) processSegmentsWithGapFilling(config *TrackExtractorConfig, track *TrackInfo) (*TrackFileInfo, error) { + // Build list of files to concatenate (with optional gap fillers) + var cleanupFiles []string + concatFile, err := os.Create(p.buildConcatFilename(config.OutputDir, track)) + if err != nil { + return nil, err + } + cleanupFiles = append(cleanupFiles, concatFile.Name()) + + // If enabled, cleanUp all working files (segment mkv, silence or black frame files and concat.txt) + if config.Cleanup { + defer func(files *[]string) { + for _, file := range *files { + p.logger.Infof("Cleaning up temporary file: %s", file) + if err := os.Remove(file); err != nil { + p.logger.Warnf("Failed to clean up temporary file %s: %v", file, err) + } + } + }(&cleanupFiles) + } + defer func() { + _ = concatFile.Close() + }() + + for i, segment := range track.Segments { + if _, e := fmt.Fprintf(concatFile, "file '%s'\n", segment.ContainerPath); e != nil { + return nil, e + } + cleanupFiles = append(cleanupFiles, segment.ContainerPath) + + // Add gap filler if requested and there's a gap before the next segment + if config.FillGap && i < track.SegmentCount-1 { + nextSegment := track.Segments[i+1] + offset := int64(0) + if nextSegment.metadata.FirstKeyFrameOffsetMs != nil { + offset = *nextSegment.metadata.FirstKeyFrameOffsetMs + } + gapDuration := offset + firstPacketNtpTimestamp(nextSegment.metadata) - lastPacketNtpTimestamp(segment.metadata) + + if gapDuration > 0 { // There's a gap + gapSeconds := float64(gapDuration) / 1000.0 + p.logger.Infof("Detected %dms gap between segments, 
generating %s filler", gapDuration, track.TrackKind) + + // Create gap filler file + gapFilePath := p.buildGapFilename(config.OutputDir, track, i) + + var args []string + if track.TrackKind == trackKindVideo { + args = generateBlackVideoArguments(gapFilePath, track.Codec, gapSeconds, 1280, 720, blackVideoFps) + } else { + args = generateSilenceArguments(gapFilePath, gapSeconds) + } + + if e := runFFmpegCommand(args, p.logger); e != nil { + p.logger.Warnf("Failed to generate %s gap, skipping: %v", track.TrackKind, e) + continue + } + cleanupFiles = append(cleanupFiles, gapFilePath) + + absPath, err := filepath.Abs(gapFilePath) + if err != nil { + return nil, err + } + + if _, e := fmt.Fprintf(concatFile, "file '%s'\n", absPath); e != nil { + return nil, e + } + } + } + } + + // Create final output file + finalPath := p.buildFilename(config.OutputDir, track) + + // Concatenate all segments (with gap fillers if any) + args, err := generateConcatFileArguments(finalPath, concatFile.Name()) + if err != nil { + return nil, fmt.Errorf("failed to generate ffmpeg arguments: %w", err) + } + + err = runFFmpegCommand(args, p.logger) + if err != nil { + return nil, fmt.Errorf("failed to concatenate segments: %w", err) + } + + p.logger.Debugf("Successfully concatenated %d segments into %s (gap filled %t)", track.SegmentCount, finalPath, config.FillGap) + + var ts, te int64 + if len(track.Segments) > 0 { + ts = track.Segments[0].metadata.FirstRtpUnixTimestamp + te = track.Segments[len(track.Segments)-1].metadata.LastRtpUnixTimestamp + } + + var audioTrack, videoTrack *TrackInfo + switch track.TrackKind { + case trackKindAudio: + audioTrack = track + case trackKindVideo: + videoTrack = track + } + return &TrackFileInfo{ + Name: finalPath, + StartAt: time.UnixMilli(ts), + EndAt: time.UnixMilli(te), + MaxFrameDimension: p.getMaxFrameDimension(track), + AudioTrack: audioTrack, + VideoTrack: videoTrack, + }, nil +} + +func (p *TrackExtractor) getMaxFrameDimension(track *TrackInfo) 
SegmentFrameDimension { + frameDimension := SegmentFrameDimension{} + if track.TrackKind == trackKindVideo { + for _, segment := range track.Segments { + if segment.metadata.MaxFrameDimension != nil { + frameDimension = getMaxFrameDimension(*segment.metadata.MaxFrameDimension, frameDimension) + } + } + } + return frameDimension +} + +// buildDefaultFilename creates output filename that indicates media type +func (p *TrackExtractor) buildFilename(outputDir string, track *TrackInfo) string { + media := track.TrackKind + "_only" + if track.IsScreenshare { + media = "shared_" + media + } + + return filepath.Join(outputDir, fmt.Sprintf("individual_%s_%s_%s_%s_%s_%d.%s", track.CallType, track.CallID, track.UserID, track.SessionID, media, track.CallStartTime.UnixMilli(), track.Segments[0].ContainerExt)) +} + +func (p *TrackExtractor) buildGapFilename(outputDir string, track *TrackInfo, i int) string { + return filepath.Join(outputDir, fmt.Sprintf("gap_%s_%s_%s_%d_%d.%s", track.UserID, track.SessionID, track.TrackKind, track.CallStartTime.UnixMilli(), i, track.Segments[i].ContainerExt)) +} + +func (p *TrackExtractor) buildConcatFilename(outputDir string, track *TrackInfo) string { + return filepath.Join(outputDir, fmt.Sprintf("concat_%s_%s_%s_%d.txt", track.UserID, track.SessionID, track.TrackKind, track.CallStartTime.UnixMilli())) +} diff --git a/pkg/cmd/raw-recording/root.go b/pkg/cmd/raw-recording/root.go new file mode 100644 index 0000000..0f45a85 --- /dev/null +++ b/pkg/cmd/raw-recording/root.go @@ -0,0 +1,332 @@ +package rawrecording + +import ( + "context" + "fmt" + "os" + + "github.com/GetStream/stream-cli/pkg/cmd/raw-recording/processing" + "github.com/MakeNowJust/heredoc" + "github.com/spf13/cobra" +) + +// GlobalArgs holds the global arguments shared across all subcommands +type GlobalArgs struct { + InputFile string + InputDir string + InputS3 string + Output string + Verbose bool + CacheDir string + WorkDir string + + // resolvedInputPath is the local path to 
the input (after S3 download if needed) + resolvedInputPath string +} + +func NewRootCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "raw-recording", + Short: "Post-processing tools for raw video call recordings", + Long: heredoc.Doc(` + Post-processing tools for raw video call recordings. + + These commands allow you to extract, process, and mux audio/video + tracks from raw recording archives. + `), + Example: heredoc.Doc(` + # List all tracks in a recording + $ stream-cli video raw-recording list-tracks --input-file recording.tar.gz + + # Extract audio tracks for a specific user + $ stream-cli video raw-recording extract-audio --input-file recording.tar.gz --output ./out --user-id user123 + + # Mux audio and video tracks + $ stream-cli video raw-recording mux-av --input-file recording.tar.gz --output ./out + `), + } + + // Persistent flags (global options available to all subcommands) + pf := cmd.PersistentFlags() + pf.String(FlagInputFile, "", DescInputFile) + pf.String(FlagInputDir, "", DescInputDir) + pf.String(FlagInputS3, "", DescInputS3) + pf.String(FlagOutput, "", DescOutput) + pf.Bool(FlagVerbose, false, DescVerbose) + pf.String(FlagCacheDir, "", DescCacheDir) + + // Add subcommands + cmd.AddCommand( + listTracksCmd(), + extractAudioCmd(), + extractVideoCmd(), + muxAVCmd(), + mixAudioCmd(), + processAllCmd(), + ) + + return cmd +} + +// getGlobalArgs extracts global arguments from cobra command flags +func getGlobalArgs(cmd *cobra.Command) (*GlobalArgs, error) { + inputFile, _ := cmd.Flags().GetString(FlagInputFile) + inputDir, _ := cmd.Flags().GetString(FlagInputDir) + inputS3, _ := cmd.Flags().GetString(FlagInputS3) + output, _ := cmd.Flags().GetString(FlagOutput) + verbose, _ := cmd.Flags().GetBool(FlagVerbose) + cacheDir, _ := cmd.Flags().GetString(FlagCacheDir) + + // Use default cache directory if not specified + if cacheDir == "" { + cacheDir = GetDefaultCacheDir() + } + + return &GlobalArgs{ + InputFile: inputFile, + InputDir: inputDir, + 
InputS3: inputS3, + Output: output, + Verbose: verbose, + CacheDir: cacheDir, + }, nil +} + +// validateGlobalArgs validates global arguments +func validateGlobalArgs(globalArgs *GlobalArgs, requireOutput bool) error { + if globalArgs.InputFile == "" && globalArgs.InputDir == "" && globalArgs.InputS3 == "" { + return fmt.Errorf("either --%s or --%s or --%s must be specified", FlagInputFile, FlagInputDir, FlagInputS3) + } + + num := 0 + if globalArgs.InputFile != "" { + num++ + } + if globalArgs.InputDir != "" { + num++ + } + if globalArgs.InputS3 != "" { + num++ + } + if num > 1 { + return fmt.Errorf("--%s, --%s and --%s are exclusive, only one is allowed", FlagInputFile, FlagInputDir, FlagInputS3) + } + + if requireOutput && globalArgs.Output == "" { + return fmt.Errorf("--%s directory must be specified", FlagOutput) + } + + return nil +} + +// resolveInputPath resolves the input to a local path, downloading from S3 if necessary +func resolveInputPath(ctx context.Context, globalArgs *GlobalArgs) (string, error) { + // If already resolved, return cached path + if globalArgs.resolvedInputPath != "" { + return globalArgs.resolvedInputPath, nil + } + + var inputPath string + + if globalArgs.InputFile != "" { + inputPath = globalArgs.InputFile + } else if globalArgs.InputDir != "" { + inputPath = globalArgs.InputDir + } else if globalArgs.InputS3 != "" { + // Download from S3 (with caching) + downloader := NewS3Downloader(globalArgs.CacheDir, globalArgs.Verbose) + downloadedPath, err := downloader.Download(ctx, globalArgs.InputS3) + if err != nil { + return "", fmt.Errorf("failed to download from S3: %w", err) + } + inputPath = downloadedPath + } else { + return "", fmt.Errorf("no input specified") + } + + // Cache the resolved path + globalArgs.resolvedInputPath = inputPath + return inputPath, nil +} + +// validateInputArgs validates input arguments using mutually exclusive logic +func validateInputArgs(globalArgs *GlobalArgs, userID, sessionID, trackID string) 
(*processing.RecordingMetadata, error) { + // Count how many filters are specified + filtersCount := 0 + if userID != "" { + filtersCount++ + } + if sessionID != "" { + filtersCount++ + } + if trackID != "" { + filtersCount++ + } + + // Ensure filters are mutually exclusive + if filtersCount > 1 { + return nil, fmt.Errorf("only one filter can be specified at a time: --%s, --%s, and --%s are mutually exclusive", FlagUserID, FlagSessionID, FlagTrackID) + } + + // Resolve input path (download from S3 if needed) + inputPath, err := resolveInputPath(context.Background(), globalArgs) + if err != nil { + return nil, err + } + + // Parse metadata to validate the single specified argument + logger := setupLogger(false) + parser := processing.NewMetadataParser(logger) + metadata, err := parser.ParseMetadataOnly(inputPath) + if err != nil { + return nil, fmt.Errorf("failed to parse recording for validation: %w", err) + } + + // If no filters specified, no validation needed + if filtersCount == 0 { + return metadata, nil + } + + // Validate the single specified filter + if trackID != "" { + found := false + for _, track := range metadata.Tracks { + if track.TrackID == trackID { + found = true + break + } + } + if !found { + return nil, fmt.Errorf("%s '%s' not found in recording. Use 'list-tracks --%s tracks' to see available track IDs", FlagTrackID, trackID, FlagFormat) + } + } else if sessionID != "" { + found := false + for _, track := range metadata.Tracks { + if track.SessionID == sessionID { + found = true + break + } + } + if !found { + return nil, fmt.Errorf("%s '%s' not found in recording. Use 'list-tracks --%s sessions' to see available session IDs", FlagSessionID, sessionID, FlagFormat) + } + } else if userID != "" { + found := false + for _, uid := range metadata.UserIDs { + if uid == userID { + found = true + break + } + } + if !found { + return nil, fmt.Errorf("%s '%s' not found in recording. 
Use 'list-tracks --%s users' to see available user IDs", FlagUserID, userID, FlagFormat) + } + } + + return metadata, nil +} + +// setupLogger creates a logger with the specified verbosity +func setupLogger(verbose bool) *processing.ProcessingLogger { + var level processing.LogLevel + if verbose { + level = processing.LogLevelDebug + } else { + level = processing.LogLevelInfo + } + return processing.NewProcessingLogger(os.Stderr, level) +} + +// prepareWorkDir extracts the recording to a temp directory and returns the working directory +func prepareWorkDir(globalArgs *GlobalArgs, logger *processing.ProcessingLogger) (string, func(), error) { + // Resolve input path (download from S3 if needed) + path, err := resolveInputPath(context.Background(), globalArgs) + if err != nil { + return "", nil, err + } + + workingDir, cleanup, err := processing.ExtractToTempDir(path, logger) + if err != nil { + return "", nil, fmt.Errorf("failed to prepare working directory: %w", err) + } + + return workingDir, cleanup, nil +} + +// completeUserIDs provides completion for user IDs +func completeUserIDs(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + inputFile, _ := cmd.Flags().GetString(FlagInputFile) + inputDir, _ := cmd.Flags().GetString(FlagInputDir) + + inputPath := inputFile + if inputPath == "" { + inputPath = inputDir + } + if inputPath == "" { + return nil, cobra.ShellCompDirectiveNoFileComp + } + + logger := setupLogger(false) + parser := processing.NewMetadataParser(logger) + metadata, err := parser.ParseMetadataOnly(inputPath) + if err != nil { + return nil, cobra.ShellCompDirectiveError + } + + return metadata.UserIDs, cobra.ShellCompDirectiveNoFileComp +} + +// completeSessionIDs provides completion for session IDs +func completeSessionIDs(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + inputFile, _ := cmd.Flags().GetString(FlagInputFile) + inputDir, _ := 
cmd.Flags().GetString(FlagInputDir) + + inputPath := inputFile + if inputPath == "" { + inputPath = inputDir + } + if inputPath == "" { + return nil, cobra.ShellCompDirectiveNoFileComp + } + + logger := setupLogger(false) + parser := processing.NewMetadataParser(logger) + metadata, err := parser.ParseMetadataOnly(inputPath) + if err != nil { + return nil, cobra.ShellCompDirectiveError + } + + return metadata.Sessions, cobra.ShellCompDirectiveNoFileComp +} + +// completeTrackIDs provides completion for track IDs +func completeTrackIDs(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + inputFile, _ := cmd.Flags().GetString(FlagInputFile) + inputDir, _ := cmd.Flags().GetString(FlagInputDir) + + inputPath := inputFile + if inputPath == "" { + inputPath = inputDir + } + if inputPath == "" { + return nil, cobra.ShellCompDirectiveNoFileComp + } + + logger := setupLogger(false) + parser := processing.NewMetadataParser(logger) + metadata, err := parser.ParseMetadataOnly(inputPath) + if err != nil { + return nil, cobra.ShellCompDirectiveError + } + + trackIDs := make([]string, 0, len(metadata.Tracks)) + seen := make(map[string]bool) + for _, track := range metadata.Tracks { + if !seen[track.TrackID] { + trackIDs = append(trackIDs, track.TrackID) + seen[track.TrackID] = true + } + } + + return trackIDs, cobra.ShellCompDirectiveNoFileComp +} diff --git a/pkg/cmd/raw-recording/s3_downloader.go b/pkg/cmd/raw-recording/s3_downloader.go new file mode 100644 index 0000000..ddc52a1 --- /dev/null +++ b/pkg/cmd/raw-recording/s3_downloader.go @@ -0,0 +1,394 @@ +package rawrecording + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + + "github.com/aws/aws-sdk-go-v2/config" + s3manager "github.com/aws/aws-sdk-go-v2/feature/s3/manager" + "github.com/aws/aws-sdk-go-v2/service/s3" +) + +// S3Downloader handles downloading files from S3 with 
caching +type S3Downloader struct { + cacheDir string + verbose bool +} + +// CacheMetadata stores information about a cached file +type CacheMetadata struct { + ETag string `json:"etag"` + OriginalURL string `json:"original_url"` + LastModified string `json:"last_modified,omitempty"` +} + +// NewS3Downloader creates a new S3Downloader +func NewS3Downloader(cacheDir string, verbose bool) *S3Downloader { + return &S3Downloader{ + cacheDir: cacheDir, + verbose: verbose, + } +} + +// Download downloads a file from S3 or presigned URL, using cache if available +// Returns the local file path to the downloaded file +func (d *S3Downloader) Download(ctx context.Context, inputURL string) (string, error) { + // Ensure cache directory exists + if err := os.MkdirAll(d.cacheDir, 0755); err != nil { + return "", fmt.Errorf("failed to create cache directory: %w", err) + } + + // Generate cache key from URL + cacheKey := d.generateCacheKey(inputURL) + cachedFilePath := filepath.Join(d.cacheDir, cacheKey+".tar.gz") + metadataPath := filepath.Join(d.cacheDir, cacheKey+".meta.json") + + // Check if file is already cached + if d.isCacheValid(ctx, inputURL, cachedFilePath, metadataPath) { + if d.verbose { + fmt.Printf("Using cached file: %s\n", cachedFilePath) + } + return cachedFilePath, nil + } + + // Download the file + if d.verbose { + fmt.Printf("Downloading from: %s\n", d.sanitizeURLForLog(inputURL)) + } + + var etag string + var err error + + if isS3URL(inputURL) { + etag, err = d.downloadFromS3(ctx, inputURL, cachedFilePath) + } else { + etag, err = d.downloadFromPresignedURL(ctx, inputURL, cachedFilePath) + } + + if err != nil { + return "", err + } + + // Save cache metadata + metadata := CacheMetadata{ + ETag: etag, + OriginalURL: d.hashURL(inputURL), // Store hash instead of URL for privacy + } + if err := d.saveCacheMetadata(metadataPath, &metadata); err != nil { + // Log but don't fail - download succeeded + if d.verbose { + fmt.Printf("Warning: failed to save cache 
metadata: %v\n", err) + } + } + + if d.verbose { + fmt.Printf("Downloaded to: %s\n", cachedFilePath) + } + + return cachedFilePath, nil +} + +// generateCacheKey creates a unique cache key from the URL +func (d *S3Downloader) generateCacheKey(inputURL string) string { + return d.hashURL(inputURL) +} + +// hashURL creates a SHA256 hash of the URL +func (d *S3Downloader) hashURL(inputURL string) string { + // For presigned URLs, we only hash the base path (without query params) + // This allows the same file to be cached even if the signature changes + baseURL := inputURL + if u, err := url.Parse(inputURL); err == nil && !isS3URL(inputURL) { + baseURL = u.Scheme + "://" + u.Host + u.Path + } + + hash := sha256.Sum256([]byte(baseURL)) + return hex.EncodeToString(hash[:])[:16] // Use first 16 chars +} + +// sanitizeURLForLog removes sensitive query parameters from URL for logging +func (d *S3Downloader) sanitizeURLForLog(inputURL string) string { + if isS3URL(inputURL) { + return inputURL + } + u, err := url.Parse(inputURL) + if err != nil { + return "[invalid URL]" + } + return u.Scheme + "://" + u.Host + u.Path + "?[signature hidden]" +} + +// isS3URL checks if the URL is an s3:// URL +func isS3URL(inputURL string) bool { + return strings.HasPrefix(inputURL, "s3://") +} + +// parseS3URL parses an s3:// URL into bucket and key +func parseS3URL(inputURL string) (bucket, key string, err error) { + if !isS3URL(inputURL) { + return "", "", fmt.Errorf("not an S3 URL: %s", inputURL) + } + + // Remove s3:// prefix + path := strings.TrimPrefix(inputURL, "s3://") + + // Split into bucket and key + parts := strings.SplitN(path, "/", 2) + if len(parts) < 2 { + return "", "", fmt.Errorf("invalid S3 URL format, expected s3://bucket/key: %s", inputURL) + } + + return parts[0], parts[1], nil +} + +// getS3ClientForBucket creates an S3 client configured for the bucket's region +func (d *S3Downloader) getS3ClientForBucket(ctx context.Context, bucket string) (*s3.Client, error) { + // 
First, load the default config + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + return nil, fmt.Errorf("failed to load AWS config: %w", err) + } + + // Create a client to detect the bucket region + client := s3.NewFromConfig(cfg) + + // Get the actual bucket region + region, err := s3manager.GetBucketRegion(ctx, client, bucket) + if err != nil { + // If we can't detect the region, return the default client + if d.verbose { + fmt.Printf("Warning: could not detect bucket region, using default: %v\n", err) + } + return client, nil + } + + if d.verbose { + fmt.Printf("Detected bucket region: %s\n", region) + } + + // Reload config with the correct region + cfg, err = config.LoadDefaultConfig(ctx, config.WithRegion(region)) + if err != nil { + return nil, fmt.Errorf("failed to load AWS config with region %s: %w", region, err) + } + + return s3.NewFromConfig(cfg), nil +} + +// isCacheValid checks if the cached file is still valid +func (d *S3Downloader) isCacheValid(ctx context.Context, inputURL, cachedFilePath, metadataPath string) bool { + // Check if cached file exists + if _, err := os.Stat(cachedFilePath); os.IsNotExist(err) { + return false + } + + // Check if metadata exists + metadata, err := d.loadCacheMetadata(metadataPath) + if err != nil { + return false + } + + // Verify URL hash matches + if metadata.OriginalURL != d.hashURL(inputURL) { + return false + } + + // Get current ETag from remote + var remoteETag string + if isS3URL(inputURL) { + remoteETag, err = d.getS3ETag(ctx, inputURL) + } else { + remoteETag, err = d.getPresignedURLETag(ctx, inputURL) + } + + if err != nil { + if d.verbose { + fmt.Printf("Warning: failed to get remote ETag, will re-download: %v\n", err) + } + return false + } + + // Compare ETags + return metadata.ETag == remoteETag +} + +// getS3ETag gets the ETag for an S3 object +func (d *S3Downloader) getS3ETag(ctx context.Context, inputURL string) (string, error) { + bucket, key, err := parseS3URL(inputURL) + if err != 
nil { + return "", err + } + + client, err := d.getS3ClientForBucket(ctx, bucket) + if err != nil { + return "", err + } + + result, err := client.HeadObject(ctx, &s3.HeadObjectInput{ + Bucket: &bucket, + Key: &key, + }) + if err != nil { + return "", fmt.Errorf("failed to get S3 object metadata: %w", err) + } + + if result.ETag != nil { + return *result.ETag, nil + } + return "", nil +} + +// getPresignedURLETag gets the ETag for a presigned URL via HEAD request +func (d *S3Downloader) getPresignedURLETag(ctx context.Context, inputURL string) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodHead, inputURL, nil) + if err != nil { + return "", fmt.Errorf("failed to create HEAD request: %w", err) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return "", fmt.Errorf("failed to execute HEAD request: %w", err) + } + defer func() { + _ = resp.Body.Close() + }() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("HEAD request failed with status: %d", resp.StatusCode) + } + + return resp.Header.Get("ETag"), nil +} + +// downloadFromS3 downloads a file from S3 using the AWS SDK +func (d *S3Downloader) downloadFromS3(ctx context.Context, inputURL, destPath string) (string, error) { + bucket, key, err := parseS3URL(inputURL) + if err != nil { + return "", err + } + + client, err := d.getS3ClientForBucket(ctx, bucket) + if err != nil { + return "", err + } + + result, err := client.GetObject(ctx, &s3.GetObjectInput{ + Bucket: &bucket, + Key: &key, + }) + if err != nil { + return "", fmt.Errorf("failed to download from S3: %w", err) + } + defer func() { + _ = result.Body.Close() + }() + + // Create destination file + file, err := os.Create(destPath) + if err != nil { + return "", fmt.Errorf("failed to create destination file: %w", err) + } + defer func() { + _ = file.Close() + }() + + // Copy content + if _, err := io.Copy(file, result.Body); err != nil { + _ = os.Remove(destPath) // Clean up partial file + return "", 
fmt.Errorf("failed to write file: %w", err) + } + + var etag string + if result.ETag != nil { + etag = *result.ETag + } + + return etag, nil +} + +// downloadFromPresignedURL downloads a file from a presigned URL +func (d *S3Downloader) downloadFromPresignedURL(ctx context.Context, inputURL, destPath string) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, inputURL, nil) + if err != nil { + return "", fmt.Errorf("failed to create GET request: %w", err) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return "", fmt.Errorf("failed to execute GET request: %w", err) + } + defer func() { + _ = resp.Body.Close() + }() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("download failed with status: %d", resp.StatusCode) + } + + // Create destination file + file, err := os.Create(destPath) + if err != nil { + return "", fmt.Errorf("failed to create destination file: %w", err) + } + defer func() { + _ = file.Close() + }() + + // Copy content + if _, err := io.Copy(file, resp.Body); err != nil { + _ = os.Remove(destPath) // Clean up partial file + return "", fmt.Errorf("failed to write file: %w", err) + } + + return resp.Header.Get("ETag"), nil +} + +// loadCacheMetadata loads cache metadata from a JSON file +func (d *S3Downloader) loadCacheMetadata(path string) (*CacheMetadata, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, err + } + + var metadata CacheMetadata + if err := json.Unmarshal(data, &metadata); err != nil { + return nil, err + } + + return &metadata, nil +} + +// saveCacheMetadata saves cache metadata to a JSON file +func (d *S3Downloader) saveCacheMetadata(path string, metadata *CacheMetadata) error { + data, err := json.MarshalIndent(metadata, "", " ") + if err != nil { + return err + } + + return os.WriteFile(path, data, 0644) +} + +// GetDefaultCacheDir returns the default cache directory +func GetDefaultCacheDir() string { + // Try user cache directory first + if 
cacheDir, err := os.UserCacheDir(); err == nil { + return filepath.Join(cacheDir, DefaultCacheSubdir) + } + + // Fallback to home directory + if homeDir, err := os.UserHomeDir(); err == nil { + return filepath.Join(homeDir, ".cache", DefaultCacheSubdir) + } + + // Last resort: temp directory + return filepath.Join(os.TempDir(), DefaultCacheSubdir) +} diff --git a/pkg/cmd/root/root.go b/pkg/cmd/root/root.go index 4585dcc..7e96b65 100644 --- a/pkg/cmd/root/root.go +++ b/pkg/cmd/root/root.go @@ -8,6 +8,7 @@ import ( "github.com/GetStream/stream-cli/pkg/cmd/chat" cfgCmd "github.com/GetStream/stream-cli/pkg/cmd/config" + "github.com/GetStream/stream-cli/pkg/cmd/video" "github.com/GetStream/stream-cli/pkg/config" "github.com/GetStream/stream-cli/pkg/version" ) @@ -39,6 +40,7 @@ func NewCmd() *cobra.Command { root.AddCommand( cfgCmd.NewRootCmd(), chat.NewRootCmd(), + video.NewRootCmd(), ) cobra.OnInitialize(config.GetInitConfig(root, cfgPath)) diff --git a/pkg/cmd/video/root.go b/pkg/cmd/video/root.go new file mode 100644 index 0000000..304ea8a --- /dev/null +++ b/pkg/cmd/video/root.go @@ -0,0 +1,16 @@ +package video + +import ( + "github.com/spf13/cobra" + + rawrecording "github.com/GetStream/stream-cli/pkg/cmd/raw-recording" +) + +func NewRootCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "video", + Short: "Video processing commands", + } + cmd.AddCommand(rawrecording.NewRootCmd()) + return cmd +} diff --git a/pkg/config/config.go b/pkg/config/config.go index 823276f..01363b9 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -196,7 +196,7 @@ func GetInitConfig(cmd *cobra.Command, cfgPath *string) func() { os.Exit(1) } - f.Close() + _ = f.Close() } if err != nil { cmd.PrintErr(err) diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 7799bd5..074c381 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -188,8 +188,8 @@ default: test2 } func getNormalizedString(s string) string { - noSpace := 
// getNormalizedString removes every space and newline from s and trims
// any remaining surrounding whitespace, yielding a compact form suitable
// for whitespace-insensitive comparisons in tests.
func getNormalizedString(s string) string {
	collapsed := strings.NewReplacer(" ", "", "\n", "").Replace(s)
	return strings.TrimSpace(collapsed)
}