mirror of
https://github.com/morgan9e/UxPlay
synced 2026-04-15 00:34:05 +09:00
Merge pull request #173 from FDH2/master
UxPlay-1.70 with support for h265 (HEVC or 4K) video
This commit is contained in:
@@ -47,12 +47,6 @@ if ( GST_MACOS )
|
||||
message ( STATUS "define GST_MACOS" )
|
||||
endif()
|
||||
|
||||
if ( GST_124 )
|
||||
add_definitions( -DGST_124 )
|
||||
message ( STATUS "define GST_124" )
|
||||
endif()
|
||||
|
||||
|
||||
add_executable( uxplay uxplay.cpp )
|
||||
target_link_libraries( uxplay
|
||||
renderers
|
||||
|
||||
117
README.html
117
README.html
@@ -1,6 +1,6 @@
|
||||
<h1
|
||||
id="uxplay-1.69-airplay-mirror-and-airplay-audio-server-for-linux-macos-and-unix-now-also-runs-on-windows.">UxPlay
|
||||
1.69: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix
|
||||
id="uxplay-1.70-airplay-mirror-and-airplay-audio-server-for-linux-macos-and-unix-now-also-runs-on-windows.">UxPlay
|
||||
1.70: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix
|
||||
(now also runs on Windows).</h1>
|
||||
<h3
|
||||
id="now-developed-at-the-github-site-httpsgithub.comfdh2uxplay-where-all-user-issues-should-be-posted-and-latest-versions-can-be-found."><strong>Now
|
||||
@@ -9,10 +9,14 @@ href="https://github.com/FDH2/UxPlay">https://github.com/FDH2/UxPlay</a>
|
||||
(where ALL user issues should be posted, and latest versions can be
|
||||
found).</strong></h3>
|
||||
<ul>
|
||||
<li><em><strong>NEW in v1.69</strong>: minor changes for users:
|
||||
-nofreeze option to NOT leave frozen video in place when a network
|
||||
failure occurs; internal changes/improvements needed for planned future
|
||||
HLS video streaming support.</em></li>
|
||||
<li><em><strong>NEW in v1.70</strong>: Support for 4k (h265) video with
|
||||
the new “-h265” option.</em> (Recent Apple devices will send HEVC (h265)
|
||||
video in AirPlay mirror mode if larger resolutions (<em>h</em> >
|
||||
1080) are requested with UxPlay’s “-s wxh” option; wired ethernet
|
||||
connection is prefered to wireless in this mode, and may also be
|
||||
required by the client; the “-h265” option changes the default
|
||||
resolution from 1920x1080 to 3840x2160, but leaves default maximum
|
||||
framerate (“-fps” option) at 30fps.)</li>
|
||||
</ul>
|
||||
<h2 id="highlights">Highlights:</h2>
|
||||
<ul>
|
||||
@@ -82,8 +86,12 @@ with the option “uxplay -async”, but there is then a 2 second latency
|
||||
imposed by iOS.</p></li>
|
||||
<li><p>Add any UxPlay options you want to use as defaults to a startup
|
||||
file <code>~/.uxplayrc</code> (see “<code>man uxplay</code>” or
|
||||
“<code>uxplay -h</code>” for format and other possible
|
||||
locations).</p></li>
|
||||
“<code>uxplay -h</code>” for format and other possible locations). In
|
||||
particular, if your system uses PipeWire audio or Wayland video systems,
|
||||
you may wish to add “as pipewiresink” or “vs waylandsink” as defaults to
|
||||
the file. <em>(Output from terminal commands “ps waux | grep pulse” or
|
||||
“pactl info” will contain “pipewire” if your Linux/BSD system uses
|
||||
it).</em></p></li>
|
||||
<li><p>On Raspberry Pi: If you use Ubuntu 22.10 or earlier, GStreamer
|
||||
must be <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches">patched</a>
|
||||
@@ -173,8 +181,9 @@ without the accompanying video (there are plans to support HLS video in
|
||||
future releases of UxPlay)</strong></p></li>
|
||||
</ul>
|
||||
<h3
|
||||
id="possibility-for-using-hardware-accelerated-h264-video-decoding-if-available.">Possibility
|
||||
for using hardware-accelerated h264 video-decoding, if available.</h3>
|
||||
id="possibility-for-using-hardware-accelerated-h264h265-video-decoding-if-available.">Possibility
|
||||
for using hardware-accelerated h264/h265 video-decoding, if
|
||||
available.</h3>
|
||||
<p>UxPlay uses <a href="https://gstreamer.freedesktop.org">GStreamer</a>
|
||||
“plugins” for rendering audio and video. This means that video and audio
|
||||
are supported “out of the box”, using a choice of plugins. AirPlay
|
||||
@@ -198,21 +207,24 @@ accelerated video decoding on the NVIDIA GPU after NVIDIA’s CUDA driver
|
||||
the plugin is called <code>nvdec</code>, and must be <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/NVIDIA-nvdec-and-nvenc-plugins">built
|
||||
by the user</a>.</p></li>
|
||||
<li><p><strong>Video4Linux2 support for the Raspberry Pi Broadcom 2835
|
||||
GPU (Pi 4B and older)</strong></p>
|
||||
<li><p><strong>Video4Linux2 support for h264 hardware decoding on
|
||||
Raspberry Pi (Pi 4B and older)</strong></p>
|
||||
<p>Raspberry Pi (RPi) computers (tested on Pi 4 Model B) can now run
|
||||
UxPlay using software video decoding, but hardware-accelerated decoding
|
||||
by firmware in the Pi’s GPU is prefered. UxPlay accesses this using the
|
||||
GStreamer-1.22 Video4Linux2 (v4l2) plugin; the plugin from older
|
||||
GStreamer < 1.22 needs a backport patch (already partially applied in
|
||||
Raspberry Pi OS (Bullseye), available for 1.18.4 and later in the <a
|
||||
UxPlay using software video decoding, but hardware-accelerated h264/h265
|
||||
decoding by firmware in the Pi’s Broadcom 2835 GPU is prefered. UxPlay
|
||||
accesses this using the GStreamer-1.22 Video4Linux2 (v4l2) plugin; Uses
|
||||
the out-of-mainline Linux kernel module bcm2835-codec maintained by
|
||||
Raspberry Pi, so far only included in Raspberry Pi OS, and two other
|
||||
distributions (Ubuntu, Manjaro) available with Raspberry Pi Imager.
|
||||
<em>(For GStreamer < 1.22, see the <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches">UxPlay
|
||||
Wiki</a>). Also requires the out-of-mainline Linux kernel module
|
||||
bcm2835-codec maintained by Raspberry Pi, so far only included in
|
||||
Raspberry Pi OS, and two other distributions (Ubuntu, Manjaro) available
|
||||
with Raspberry Pi Imager. <em>Note: The latest Raspberry Pi model 5 does
|
||||
not provide hardware-accelerated (GPU) H264 decoding as its CPU is
|
||||
powerful enough for satisfactory software decoding.</em></p></li>
|
||||
Wiki</a>)</em>.</p></li>
|
||||
<li><p><strong>(New): Support for h265 (HEVC) hardware decoding on
|
||||
Raspberry Pi (Pi 4 model B and Pi 5)</strong></p>
|
||||
<p>Support is present, but so far satisfactory results have not been
|
||||
obtained. Pi model 5 only provides hardware-accelerated (GPU) decoding
|
||||
for h265 video, but not H264, as its CPU is powerful enough for
|
||||
satisfactory software H264 decoding</p></li>
|
||||
</ul>
|
||||
<h3 id="note-to-packagers">Note to packagers:</h3>
|
||||
<p>UxPlay’s GPLv3 license does not have an added “GPL exception”
|
||||
@@ -373,9 +385,10 @@ PCLinuxOS; it can be easily modified to include dependency lists for
|
||||
other RPM-based distributions.)</p>
|
||||
<h2 id="running-uxplay">Running UxPlay</h2>
|
||||
<h3
|
||||
id="installing-plugins-debian-based-linux-systems-skip-if-you-built-a-complete-gstreamer-from-source">Installing
|
||||
plugins (Debian-based Linux systems) (<em>skip if you built a complete
|
||||
GStreamer from source</em>)</h3>
|
||||
id="installing-plugins-debian-based-linux-distributions-including-ubuntu-and-raspberry-pi-os-skip-if-you-built-a-complete-gstreamer-from-source">Installing
|
||||
plugins (Debian-based Linux distributions, including Ubuntu and
|
||||
Raspberry Pi OS) (<em>skip if you built a complete GStreamer from
|
||||
source</em>)</h3>
|
||||
<p>Next install the GStreamer plugins that are needed with
|
||||
<code>sudo apt install gstreamer1.0-<plugin></code>. Values of
|
||||
<code><plugin></code> required are:</p>
|
||||
@@ -386,11 +399,13 @@ GStreamer from source</em>)</h3>
|
||||
decoding)</li>
|
||||
<li>“<strong>plugins-bad</strong>” (for h264 decoding).</li>
|
||||
</ol>
|
||||
<p>Plugins that may also be needed include “<strong>gl</strong>” for
|
||||
OpenGL support (this provides the “-vs glimagesink” videosink, which can
|
||||
be very useful in many systems, and should always be used when using
|
||||
h264 decoding by a NVIDIA GPU), “<strong>gtk3</strong>” (which provides
|
||||
the “-vs gtksink” videosink), and “<strong>x</strong>” for X11 support,
|
||||
<p><strong>Debian-based distributions split some of the plugin packages
|
||||
into smaller pieces:</strong> some that may also be needed include
|
||||
“<strong>gl</strong>” for OpenGL support (this provides the “-vs
|
||||
glimagesink” videosink, which can be very useful in many systems
|
||||
(including Raspberry Pi), and should always be used when using h264/h265
|
||||
decoding by a NVIDIA GPU), “<strong>gtk3</strong>” (which provides the
|
||||
“-vs gtksink” videosink), and “<strong>x</strong>” for X11 support,
|
||||
although these may already be installed; “<strong>vaapi</strong>” is
|
||||
needed for hardware-accelerated h264 video decoding by Intel or AMD
|
||||
graphics (but not for use with NVIDIA using proprietary drivers). If
|
||||
@@ -621,6 +636,14 @@ GPU with the GStreamer OMX plugin (use option
|
||||
“<code>-vd omxh264dec</code>”), but this is broken by Pi 4 Model B
|
||||
firmware. OMX support was removed from Raspberry Pi OS (Bullseye), but
|
||||
is present in Buster.</p></li>
|
||||
<li><p><strong>H265 (4K)</strong> video is supported with hardware
|
||||
decoding by the Broadcom GPU on Raspberry Pi 5 models, as well as on
|
||||
Raspberry Pi 4 model B. <strong>While GStreamer seem to make use of this
|
||||
hardware decoding, satisfactory rendering speed of 4K video by UxPlay on
|
||||
these Raspberry Pi models has not yet been acheived.</strong> The option
|
||||
“-h265” is required for activating h265 support. A wired ethernet
|
||||
connection is preferred in this mode (and may be required by the
|
||||
client).</p></li>
|
||||
</ul>
|
||||
<p>Even with GPU video decoding, some frames may be dropped by the
|
||||
lower-power models to keep audio and video synchronized using
|
||||
@@ -901,6 +924,18 @@ the mirror display (X11) window.</p>
|
||||
<p><strong>-nh</strong> Do not append “<span class="citation"
|
||||
data-cites="_hostname_">@_hostname_</span>” at the end of the AirPlay
|
||||
server name.</p>
|
||||
<p><strong>-h265</strong> Activate “ScreenMultiCodec” support (AirPlay
|
||||
“Features” bit 42) for accepting h265 (4K/HEVC) video in addition to
|
||||
h264 video (1080p) in screen-mirror mode. When this option is used, two
|
||||
“video pipelines” (one for h264, one for h265) are created. If any
|
||||
GStreamer plugins in the pipeline are specific for h264 or h265, the
|
||||
correct version will be used in each pipeline. A wired Client-Server
|
||||
ethernet connection is preferred over Wifi for 4K video, and might be
|
||||
required by the client. Only recent Apple devices (M1/M2 Macs or iPads,
|
||||
and some iPhones) can send h265 video if a resolut “-s wxh” with h >
|
||||
1080 is requested. The “-h265” option changes the default resolution
|
||||
(“-s” option) from 1920x1080 to 3840x2160, and leaves default maximum
|
||||
framerate (“-fps” option) at 30fps.</p>
|
||||
<p><strong>-pin [nnnn]</strong>: (since v1.67) use Apple-style
|
||||
(one-time) “pin” authentication when a new client connects for the first
|
||||
time: a four-digit pin code is displayed on the terminal, and the client
|
||||
@@ -974,10 +1009,11 @@ each time the length of the volume slider (or the number of steps above
|
||||
mute, where 16 steps = full volume) is reduced by 50%, the perceived
|
||||
volume is halved (a 10dB attenuation). (This is modified at low volumes,
|
||||
to use the “untapered” volume if it is louder.)</p>
|
||||
<p><strong>-s wxh</strong> (e.g. -s 1920x1080 , which is the default )
|
||||
sets the display resolution (width and height, in pixels). (This may be
|
||||
<p><strong>-s wxh</strong> e.g. -s 1920x1080 (= “1080p”), the default
|
||||
width and height resolutions in pixels for h264 video. (The default
|
||||
becomes 3840x2160 (= “4K”) when the -h265 option is used.) This is just
|
||||
a request made to the AirPlay client, and perhaps will not be the final
|
||||
resolution you get.) w and h are whole numbers with four digits or less.
|
||||
resolution you get. w and h are whole numbers with four digits or less.
|
||||
Note that the <strong>height</strong> pixel size is the controlling one
|
||||
used by the client for determining the streaming format; the width is
|
||||
dynamically adjusted to the shape of the image (portrait or landscape
|
||||
@@ -1251,9 +1287,13 @@ that your network <strong>does not have a running Bonjour/zeroconf
|
||||
DNS-SD server.</strong> Before v1.60, UxPlay used to stall silently if
|
||||
DNS-SD service registration failed, but now stops with an error message
|
||||
returned by the DNSServiceRegister function: kDNSServiceErr_Unknown if
|
||||
no DNS-SD server was found: other mDNS error codes are in the range FFFE
|
||||
FF00 (-65792) to FFFE FFFF (-65537), and are listed in the dnssd.h file.
|
||||
An older version of this (the one used by avahi) is found <a
|
||||
no DNS-SD server was found: <em>(A NixOS user found that in NixOS, this
|
||||
error can also occur if avahi-daemon service IS running with publishing
|
||||
enabled, but reports “the error disappeared on NixOS by setting
|
||||
services.avahi.openFirewall to true”.)</em> Other mDNS error codes are
|
||||
in the range FFFE FF00 (-65792) to FFFE FFFF (-65537), and are listed in
|
||||
the dnssd.h file. An older version of this (the one used by avahi) is
|
||||
found <a
|
||||
href="https://github.com/lathiat/avahi/blob/master/avahi-compat-libdns_sd/dns_sd.h">here</a>.
|
||||
A few additional error codes are defined in a later version from <a
|
||||
href="https://opensource.apple.com/source/mDNSResponder/mDNSResponder-544/mDNSShared/dns_sd.h.auto.html">Apple</a>.</p>
|
||||
@@ -1532,6 +1572,9 @@ an AppleTV6,2 with sourceVersion 380.20.1 (an AppleTV 4K 1st gen,
|
||||
introduced 2017, running tvOS 12.2.1), so it does not seem to matter
|
||||
what version UxPlay claims to be.</p>
|
||||
<h1 id="changelog">Changelog</h1>
|
||||
<p>1.70 2024-10-04 Add support for 4K (h265) video (resolution 3840 x
|
||||
2160). Fix issue with GStreamer >= 1.24 when client sleeps, then
|
||||
wakes.</p>
|
||||
<p>1.69 2024-08-09 Internal improvements (e.g. in -nohold option,
|
||||
identifying GStreamer videosink selected by autovideosink, finding X11
|
||||
display) in anticipation of future HLS video support. New -nofreeze
|
||||
|
||||
73
README.md
73
README.md
@@ -1,14 +1,12 @@
|
||||
# UxPlay 1.69: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
# UxPlay 1.70: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
|
||||
### **Now developed at the GitHub site [https://github.com/FDH2/UxPlay](https://github.com/FDH2/UxPlay) (where ALL user issues should be posted, and latest versions can be found).**
|
||||
|
||||
* _**NEW in v1.69**: minor changes for users: -nofreeze option to NOT leave frozen
|
||||
video in place when a network failure occurs; internal changes/improvements
|
||||
needed for planned future HLS video streaming support._
|
||||
|
||||
* **An experimental ("beta") version of UxPlay with support for HLS streaming of YouTube Videos from the YouTube app on an iOS client is now available at** https://github.com/FDH2/UxPlay/tree/video .
|
||||
_See the [Wiki page](https://github.com/FDH2/UxPlay/wiki/experimental-version-of-UxPlay-with-support-for-HLS-video-streaming-(you-tube-movies)) for details._
|
||||
|
||||
* _**NEW in v1.70**: Support for 4k (h265) video with the new "-h265" option._ (Recent Apple devices will send HEVC (h265) video in AirPlay mirror mode
|
||||
if larger resolutions (_h_ > 1080) are requested with UxPlay's "-s wxh" option; wired ethernet connection is prefered to
|
||||
wireless in this mode, and may also be required by the client;
|
||||
the "-h265" option changes the default resolution from 1920x1080 to 3840x2160, but leaves default maximum framerate ("-fps" option) at 30fps.)
|
||||
|
||||
## Highlights:
|
||||
|
||||
* GPLv3, open source.
|
||||
@@ -60,7 +58,9 @@ for [running UxPlay](#running-uxplay) to see which of your distribution's **GStr
|
||||
a 2 second latency imposed by iOS.
|
||||
|
||||
* Add any UxPlay options you want to use as defaults to a startup file `~/.uxplayrc`
|
||||
(see "`man uxplay`" or "``uxplay -h``" for format and other possible locations).
|
||||
(see "`man uxplay`" or "``uxplay -h``" for format and other possible locations). In particular, if your system uses PipeWire audio or
|
||||
Wayland video systems, you may wish to add "as pipewiresink" or "vs waylandsink" as defaults to the file. _(Output from terminal commands "ps waux | grep pulse" or "pactl info" will contain "pipewire" if your Linux/BSD system uses it)._
|
||||
|
||||
|
||||
* On Raspberry Pi: If you use Ubuntu 22.10 or earlier, GStreamer must
|
||||
be [patched](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches) to use hardware video decoding by the Broadcom GPU
|
||||
@@ -134,7 +134,7 @@ using the icon for AirPlay video in apps such as the YouTube app
|
||||
will only send audio (in lossless ALAC format) without the accompanying
|
||||
video (there are plans to support HLS video in future releases of UxPlay)**
|
||||
|
||||
### Possibility for using hardware-accelerated h264 video-decoding, if available.
|
||||
### Possibility for using hardware-accelerated h264/h265 video-decoding, if available.
|
||||
|
||||
UxPlay uses [GStreamer](https://gstreamer.freedesktop.org) "plugins" for rendering
|
||||
audio and video. This means that video and audio are supported "out of the box",
|
||||
@@ -159,19 +159,21 @@ if not, software decoding is used.
|
||||
or earlier, the plugin is called `nvdec`, and
|
||||
must be [built by the user](https://github.com/FDH2/UxPlay/wiki/NVIDIA-nvdec-and-nvenc-plugins).
|
||||
|
||||
* **Video4Linux2 support for the Raspberry Pi Broadcom 2835 GPU (Pi 4B and older)**
|
||||
* **Video4Linux2 support for h264 hardware decoding on Raspberry Pi (Pi 4B and older)**
|
||||
|
||||
Raspberry Pi (RPi) computers (tested on Pi 4 Model B) can now run UxPlay using software video decoding,
|
||||
but hardware-accelerated decoding by firmware in the Pi's
|
||||
but hardware-accelerated h264/h265 decoding by firmware in the Pi's Broadcom 2835
|
||||
GPU is prefered. UxPlay accesses this using the GStreamer-1.22 Video4Linux2 (v4l2) plugin;
|
||||
the plugin from older GStreamer < 1.22 needs a backport patch (already partially applied in
|
||||
Raspberry Pi OS (Bullseye), available for 1.18.4 and later
|
||||
in the [UxPlay Wiki](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)). Also
|
||||
requires the out-of-mainline Linux kernel module bcm2835-codec maintained by Raspberry Pi,
|
||||
Uses the out-of-mainline Linux kernel module bcm2835-codec maintained by Raspberry Pi,
|
||||
so far only included in Raspberry Pi OS, and two other distributions (Ubuntu, Manjaro) available
|
||||
with Raspberry Pi Imager. _Note: The latest Raspberry Pi model 5 does not provide
|
||||
hardware-accelerated (GPU) H264 decoding as its CPU is powerful enough for satisfactory software decoding._
|
||||
with Raspberry Pi Imager. _(For GStreamer < 1.22, see
|
||||
the [UxPlay Wiki](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches))_.
|
||||
|
||||
* **(New): Support for h265 (HEVC) hardware decoding on Raspberry Pi (Pi 4 model B and Pi 5)**
|
||||
|
||||
Support is present, but so far satisfactory results have not been obtained.
|
||||
Pi model 5 only provides hardware-accelerated (GPU) decoding for h265 video, but not H264,
|
||||
as its CPU is powerful enough for satisfactory software H264 decoding
|
||||
|
||||
### Note to packagers:
|
||||
|
||||
@@ -311,7 +313,7 @@ it can be easily modified to include dependency lists for other RPM-based distri
|
||||
|
||||
## Running UxPlay
|
||||
|
||||
### Installing plugins (Debian-based Linux systems) (_skip if you built a complete GStreamer from source_)
|
||||
### Installing plugins (Debian-based Linux distributions, including Ubuntu and Raspberry Pi OS) (_skip if you built a complete GStreamer from source_)
|
||||
|
||||
Next install the GStreamer plugins that are needed with `sudo apt install gstreamer1.0-<plugin>`.
|
||||
Values of `<plugin>` required are:
|
||||
@@ -321,8 +323,9 @@ Values of `<plugin>` required are:
|
||||
3. "**plugins-good**" (for v4l2 hardware h264 decoding)
|
||||
4. "**plugins-bad**" (for h264 decoding).
|
||||
|
||||
Plugins that may also be needed include "**gl**" for OpenGL support (this provides the "-vs glimagesink" videosink, which
|
||||
can be very useful in many systems, and should always be used when using h264 decoding by a NVIDIA GPU), "**gtk3**" (which
|
||||
**Debian-based distributions split some of the plugin packages into smaller pieces:**
|
||||
some that may also be needed include "**gl**" for OpenGL support (this provides the "-vs glimagesink" videosink, which
|
||||
can be very useful in many systems (including Raspberry Pi), and should always be used when using h264/h265 decoding by a NVIDIA GPU), "**gtk3**" (which
|
||||
provides the "-vs gtksink" videosink), and "**x**" for
|
||||
X11 support, although these may already be installed; "**vaapi**"
|
||||
is needed for hardware-accelerated h264 video decoding by Intel
|
||||
@@ -477,7 +480,7 @@ See [Usage](#usage) for more run-time options.
|
||||
videosink "-vs kmssink" (the DirectFB framebuffer videosink "dfbvideosink" is broken on the Pi, and segfaults).
|
||||
_In this case you should explicitly use the "-vs kmssink" option, as without it, autovideosink does not find the correct videosink._
|
||||
|
||||
* Raspberry Pi 5 does not provide hardware H264 decoding (and does not need it).
|
||||
* Raspberry Pi 5 does not provide hardware H264 decoding (and does not need it).
|
||||
|
||||
* Pi Zero 2 W, 3 Model B+ and 4 Model B should use hardware H264 decoding by the Broadcom GPU,
|
||||
but it requires an out-of-mainstream kernel module bcm2835_codec maintained in
|
||||
@@ -503,6 +506,11 @@ See [Usage](#usage) for more run-time options.
|
||||
(use option "`-vd omxh264dec`"), but this is broken by Pi 4 Model B firmware. OMX support was removed from
|
||||
Raspberry Pi OS (Bullseye), but is present in Buster.
|
||||
|
||||
* **H265 (4K)** video is supported with hardware decoding by the Broadcom GPU on Raspberry Pi 5 models, as well as
|
||||
on Raspberry Pi 4 model B. **While GStreamer seem to make use of this hardware decoding, satisfactory rendering speed of
|
||||
4K video by UxPlay on these Raspberry Pi models has not yet been acheived.** The option "-h265" is required for activating h265 support.
|
||||
A wired ethernet connection is preferred in this mode (and may be required by the client).
|
||||
|
||||
Even with GPU video decoding, some frames may be dropped by the lower-power models to keep audio and video synchronized
|
||||
using timestamps. In Legacy Raspberry Pi OS (Bullseye), raspi-config "Performance Options" allows specifying how much memory
|
||||
to allocate to the GPU, but this setting appears to be absent in Bookworm (but it can still be set to e.g. 128MB by adding a line "gpu_mem=128" in /boot/config.txt).
|
||||
@@ -725,6 +733,14 @@ with "`#`" are treated as comments, and ignored. Command line options supersede
|
||||
|
||||
**-nh** Do not append "@_hostname_" at the end of the AirPlay server name.
|
||||
|
||||
**-h265** Activate "ScreenMultiCodec" support (AirPlay "Features" bit 42) for accepting h265 (4K/HEVC) video in addition to h264
|
||||
video (1080p) in screen-mirror mode. When this option is used, two "video pipelines" (one for h264, one for h265) are created.
|
||||
If any GStreamer plugins in the pipeline are specific for h264 or h265, the correct version will be used in each pipeline.
|
||||
A wired Client-Server ethernet connection is preferred over Wifi for 4K video, and might be required by the client. Only recent Apple devices
|
||||
(M1/M2 Macs or iPads, and some iPhones) can send h265 video if a resolut "-s wxh" with h > 1080 is requested.
|
||||
The "-h265" option changes the default resolution ("-s" option) from 1920x1080 to 3840x2160, and leaves default maximum
|
||||
framerate ("-fps" option) at 30fps.
|
||||
|
||||
**-pin [nnnn]**: (since v1.67) use Apple-style (one-time) "pin" authentication when a new client connects for the first time: a four-digit pin code is
|
||||
displayed on the terminal, and the client screen shows a login prompt for this to be entered. When "-pin" is used by itself, a new random
|
||||
pin code is chosen for each authentication; if "-pin nnnn" (e.g., "-pin 3939") is used, this will set an unchanging fixed code. Authentication adds the server to the client's list of
|
||||
@@ -771,10 +787,10 @@ using UxPlay as a second monitor for a mac computer, or monitoring a webcam; wit
|
||||
volume slider (or the number of steps above mute, where 16 steps = full volume) is reduced by 50%, the perceived volume is halved (a 10dB attenuation).
|
||||
(This is modified at low volumes, to use the "untapered" volume if it is louder.)
|
||||
|
||||
**-s wxh** (e.g. -s 1920x1080 , which is the default ) sets the display resolution (width and height,
|
||||
in pixels). (This may be a
|
||||
**-s wxh** e.g. -s 1920x1080 (= "1080p"), the default width and height resolutions in pixels for h264 video. (The default becomes
|
||||
3840x2160 (= "4K") when the -h265 option is used.) This is just a
|
||||
request made to the AirPlay client, and perhaps will not
|
||||
be the final resolution you get.) w and h are whole numbers with four
|
||||
be the final resolution you get. w and h are whole numbers with four
|
||||
digits or less. Note that the **height** pixel size is the controlling
|
||||
one used by the client for determining the streaming format; the width is
|
||||
dynamically adjusted to the shape of the image (portrait or landscape
|
||||
@@ -1001,7 +1017,9 @@ Some systems may instead use the mdnsd daemon as an alternative to provide DNS
|
||||
If UxPlay stops with the "No DNS-SD Server found" message, this means that your network **does not have a running Bonjour/zeroconf DNS-SD server.**
|
||||
Before v1.60, UxPlay used to stall silently if DNS-SD service registration failed, but now stops with an error message returned by the
|
||||
DNSServiceRegister function: kDNSServiceErr_Unknown if no DNS-SD server was found:
|
||||
other mDNS error codes are in the range FFFE FF00 (-65792) to FFFE FFFF (-65537), and are listed in the
|
||||
_(A NixOS user found that in NixOS, this error can also occur if avahi-daemon service IS running with publishing enabled, but
|
||||
reports "the error disappeared on NixOS by setting services.avahi.openFirewall to true".)_
|
||||
Other mDNS error codes are in the range FFFE FF00 (-65792) to FFFE FFFF (-65537), and are listed in the
|
||||
dnssd.h file. An older version of this (the one used by avahi) is found [here](https://github.com/lathiat/avahi/blob/master/avahi-compat-libdns_sd/dns_sd.h).
|
||||
A few additional error codes are defined in a later version
|
||||
from [Apple](https://opensource.apple.com/source/mDNSResponder/mDNSResponder-544/mDNSShared/dns_sd.h.auto.html).
|
||||
@@ -1211,6 +1229,9 @@ tvOS 12.2.1), so it does not seem to matter what version UxPlay claims to be.
|
||||
|
||||
|
||||
# Changelog
|
||||
1.70 2024-10-04 Add support for 4K (h265) video (resolution 3840 x 2160). Fix issue
|
||||
with GStreamer >= 1.24 when client sleeps, then wakes.
|
||||
|
||||
1.69 2024-08-09 Internal improvements (e.g. in -nohold option, identifying GStreamer videosink
|
||||
selected by autovideosink, finding X11 display) in anticipation of future HLS video support.
|
||||
New -nofreeze option to not leave frozen video in place when a network connection is reset.
|
||||
|
||||
120
README.txt
120
README.txt
@@ -1,11 +1,15 @@
|
||||
# UxPlay 1.69: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
# UxPlay 1.70: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
|
||||
### **Now developed at the GitHub site <https://github.com/FDH2/UxPlay> (where ALL user issues should be posted, and latest versions can be found).**
|
||||
|
||||
- ***NEW in v1.69**: minor changes for users: -nofreeze option to NOT
|
||||
leave frozen video in place when a network failure occurs; internal
|
||||
changes/improvements needed for planned future HLS video streaming
|
||||
support.*
|
||||
- ***NEW in v1.70**: Support for 4k (h265) video with the new "-h265"
|
||||
option.* (Recent Apple devices will send HEVC (h265) video in
|
||||
AirPlay mirror mode if larger resolutions (*h* \> 1080) are
|
||||
requested with UxPlay's "-s wxh" option; wired ethernet connection
|
||||
is prefered to wireless in this mode, and may also be required by
|
||||
the client; the "-h265" option changes the default resolution from
|
||||
1920x1080 to 3840x2160, but leaves default maximum framerate ("-fps"
|
||||
option) at 30fps.)
|
||||
|
||||
## Highlights:
|
||||
|
||||
@@ -74,7 +78,11 @@ After installation:
|
||||
|
||||
- Add any UxPlay options you want to use as defaults to a startup file
|
||||
`~/.uxplayrc` (see "`man uxplay`" or "`uxplay -h`" for format and
|
||||
other possible locations).
|
||||
other possible locations). In particular, if your system uses
|
||||
PipeWire audio or Wayland video systems, you may wish to add "as
|
||||
pipewiresink" or "vs waylandsink" as defaults to the file. *(Output
|
||||
from terminal commands "ps waux \| grep pulse" or "pactl info" will
|
||||
contain "pipewire" if your Linux/BSD system uses it).*
|
||||
|
||||
- On Raspberry Pi: If you use Ubuntu 22.10 or earlier, GStreamer must
|
||||
be
|
||||
@@ -163,7 +171,7 @@ stops/restarts as you leave/re-enter* **Audio** *mode.*
|
||||
format) without the accompanying video (there are plans to support
|
||||
HLS video in future releases of UxPlay)**
|
||||
|
||||
### Possibility for using hardware-accelerated h264 video-decoding, if available.
|
||||
### Possibility for using hardware-accelerated h264/h265 video-decoding, if available.
|
||||
|
||||
UxPlay uses [GStreamer](https://gstreamer.freedesktop.org) "plugins" for
|
||||
rendering audio and video. This means that video and audio are supported
|
||||
@@ -191,23 +199,27 @@ used.
|
||||
be [built by the
|
||||
user](https://github.com/FDH2/UxPlay/wiki/NVIDIA-nvdec-and-nvenc-plugins).
|
||||
|
||||
- **Video4Linux2 support for the Raspberry Pi Broadcom 2835 GPU (Pi 4B
|
||||
and older)**
|
||||
- **Video4Linux2 support for h264 hardware decoding on Raspberry Pi
|
||||
(Pi 4B and older)**
|
||||
|
||||
Raspberry Pi (RPi) computers (tested on Pi 4 Model B) can now run
|
||||
UxPlay using software video decoding, but hardware-accelerated
|
||||
decoding by firmware in the Pi's GPU is prefered. UxPlay accesses
|
||||
this using the GStreamer-1.22 Video4Linux2 (v4l2) plugin; the plugin
|
||||
from older GStreamer \< 1.22 needs a backport patch (already
|
||||
partially applied in Raspberry Pi OS (Bullseye), available for
|
||||
1.18.4 and later in the [UxPlay
|
||||
Wiki](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)).
|
||||
Also requires the out-of-mainline Linux kernel module bcm2835-codec
|
||||
maintained by Raspberry Pi, so far only included in Raspberry Pi OS,
|
||||
and two other distributions (Ubuntu, Manjaro) available with
|
||||
Raspberry Pi Imager. *Note: The latest Raspberry Pi model 5 does not
|
||||
provide hardware-accelerated (GPU) H264 decoding as its CPU is
|
||||
powerful enough for satisfactory software decoding.*
|
||||
h264/h265 decoding by firmware in the Pi's Broadcom 2835 GPU is
|
||||
preferred. UxPlay accesses this using the GStreamer-1.22 Video4Linux2
|
||||
(v4l2) plugin; Uses the out-of-mainline Linux kernel module
|
||||
bcm2835-codec maintained by Raspberry Pi, so far only included in
|
||||
Raspberry Pi OS, and two other distributions (Ubuntu, Manjaro)
|
||||
available with Raspberry Pi Imager. *(For GStreamer \< 1.22, see the
|
||||
[UxPlay
|
||||
Wiki](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches))*.
|
||||
|
||||
- **(New): Support for h265 (HEVC) hardware decoding on Raspberry Pi
|
||||
(Pi 4 model B and Pi 5)**
|
||||
|
||||
Support is present, but so far satisfactory results have not been
|
||||
obtained. Pi model 5 only provides hardware-accelerated (GPU)
|
||||
decoding for h265 video, but not H264, as its CPU is powerful enough
|
||||
for satisfactory software H264 decoding
|
||||
|
||||
### Note to packagers:
|
||||
|
||||
@@ -371,7 +383,7 @@ other RPM-based distributions.)
|
||||
|
||||
## Running UxPlay
|
||||
|
||||
### Installing plugins (Debian-based Linux systems) (*skip if you built a complete GStreamer from source*)
|
||||
### Installing plugins (Debian-based Linux distributions, including Ubuntu and Raspberry Pi OS) (*skip if you built a complete GStreamer from source*)
|
||||
|
||||
Next install the GStreamer plugins that are needed with
|
||||
`sudo apt install gstreamer1.0-<plugin>`. Values of `<plugin>` required
|
||||
@@ -382,16 +394,17 @@ are:
|
||||
3. "**plugins-good**" (for v4l2 hardware h264 decoding)
|
||||
4. "**plugins-bad**" (for h264 decoding).
|
||||
|
||||
Plugins that may also be needed include "**gl**" for OpenGL support
|
||||
(this provides the "-vs glimagesink" videosink, which can be very useful
|
||||
in many systems, and should always be used when using h264 decoding by a
|
||||
NVIDIA GPU), "**gtk3**" (which provides the "-vs gtksink" videosink),
|
||||
and "**x**" for X11 support, although these may already be installed;
|
||||
"**vaapi**" is needed for hardware-accelerated h264 video decoding by
|
||||
Intel or AMD graphics (but not for use with NVIDIA using proprietary
|
||||
drivers). If sound is not working, "**alsa**"","**pulseaudio**", or
|
||||
"**pipewire**" plugins may need to be installed, depending on how your
|
||||
audio is set up.
|
||||
**Debian-based distributions split some of the plugin packages into
|
||||
smaller pieces:** some that may also be needed include "**gl**" for
|
||||
OpenGL support (this provides the "-vs glimagesink" videosink, which can
|
||||
be very useful in many systems (including Raspberry Pi), and should
|
||||
always be used when using h264/h265 decoding by a NVIDIA GPU),
|
||||
"**gtk3**" (which provides the "-vs gtksink" videosink), and "**x**" for
|
||||
X11 support, although these may already be installed; "**vaapi**" is
|
||||
needed for hardware-accelerated h264 video decoding by Intel or AMD
|
||||
graphics (but not for use with NVIDIA using proprietary drivers). If
|
||||
sound is not working, "**alsa**", "**pulseaudio**", or "**pipewire**"
|
||||
plugins may need to be installed, depending on how your audio is set up.
|
||||
|
||||
- Also install "**gstreamer1.0-tools**" to get the utility
|
||||
gst-inspect-1.0 for examining the GStreamer installation.
|
||||
@@ -619,6 +632,15 @@ See [Usage](#usage) for more run-time options.
|
||||
this is broken by Pi 4 Model B firmware. OMX support was removed
|
||||
from Raspberry Pi OS (Bullseye), but is present in Buster.
|
||||
|
||||
- **H265 (4K)** video is supported with hardware decoding by the
|
||||
Broadcom GPU on Raspberry Pi 5 models, as well as on Raspberry Pi 4
|
||||
model B. **While GStreamer seems to make use of this hardware
|
||||
decoding, satisfactory rendering speed of 4K video by UxPlay on
|
||||
these Raspberry Pi models has not yet been achieved.** The option
|
||||
"-h265" is required for activating h265 support. A wired ethernet
|
||||
connection is preferred in this mode (and may be required by the
|
||||
client).
|
||||
|
||||
Even with GPU video decoding, some frames may be dropped by the
|
||||
lower-power models to keep audio and video synchronized using
|
||||
timestamps. In Legacy Raspberry Pi OS (Bullseye), raspi-config
|
||||
@@ -904,6 +926,19 @@ will also now be the name shown above the mirror display (X11) window.
|
||||
**-nh** Do not append "@_hostname_" at the end of the AirPlay server
|
||||
name.
|
||||
|
||||
**-h265** Activate "ScreenMultiCodec" support (AirPlay "Features" bit
|
||||
42) for accepting h265 (4K/HEVC) video in addition to h264 video (1080p)
|
||||
in screen-mirror mode. When this option is used, two "video pipelines"
|
||||
(one for h264, one for h265) are created. If any GStreamer plugins in
|
||||
the pipeline are specific for h264 or h265, the correct version will be
|
||||
used in each pipeline. A wired Client-Server ethernet connection is
|
||||
preferred over Wifi for 4K video, and might be required by the client.
|
||||
Only recent Apple devices (M1/M2 Macs or iPads, and some iPhones) can
|
||||
send h265 video if a resolution "-s wxh" with h \> 1080 is requested. The
|
||||
"-h265" option changes the default resolution ("-s" option) from
|
||||
1920x1080 to 3840x2160, and leaves default maximum framerate ("-fps"
|
||||
option) at 30fps.
|
||||
|
||||
**-pin \[nnnn\]**: (since v1.67) use Apple-style (one-time) "pin"
|
||||
authentication when a new client connects for the first time: a
|
||||
four-digit pin code is displayed on the terminal, and the client screen
|
||||
@@ -979,10 +1014,11 @@ where 16 steps = full volume) is reduced by 50%, the perceived volume is
|
||||
halved (a 10dB attenuation). (This is modified at low volumes, to use
|
||||
the "untapered" volume if it is louder.)
|
||||
|
||||
**-s wxh** (e.g. -s 1920x1080 , which is the default ) sets the display
|
||||
resolution (width and height, in pixels). (This may be a request made to
|
||||
the AirPlay client, and perhaps will not be the final resolution you
|
||||
get.) w and h are whole numbers with four digits or less. Note that the
|
||||
**-s wxh** e.g. -s 1920x1080 (= "1080p"), the default width and height
|
||||
resolutions in pixels for h264 video. (The default becomes 3840x2160 (=
|
||||
"4K") when the -h265 option is used.) This is just a request made to the
|
||||
AirPlay client, and perhaps will not be the final resolution you get. w
|
||||
and h are whole numbers with four digits or less. Note that the
|
||||
**height** pixel size is the controlling one used by the client for
|
||||
determining the streaming format; the width is dynamically adjusted to
|
||||
the shape of the image (portrait or landscape format, depending on how
|
||||
@@ -1279,9 +1315,12 @@ that your network **does not have a running Bonjour/zeroconf DNS-SD
|
||||
server.** Before v1.60, UxPlay used to stall silently if DNS-SD service
|
||||
registration failed, but now stops with an error message returned by the
|
||||
DNSServiceRegister function: kDNSServiceErr_Unknown if no DNS-SD server
|
||||
was found: other mDNS error codes are in the range FFFE FF00 (-65792) to
|
||||
FFFE FFFF (-65537), and are listed in the dnssd.h file. An older version
|
||||
of this (the one used by avahi) is found
|
||||
was found: *(A NixOS user found that in NixOS, this error can also occur
|
||||
if avahi-daemon service IS running with publishing enabled, but reports
|
||||
"the error disappeared on NixOS by setting services.avahi.openFirewall
|
||||
to true".)* Other mDNS error codes are in the range FFFE FF00 (-65792)
|
||||
to FFFE FFFF (-65537), and are listed in the dnssd.h file. An older
|
||||
version of this (the one used by avahi) is found
|
||||
[here](https://github.com/lathiat/avahi/blob/master/avahi-compat-libdns_sd/dns_sd.h).
|
||||
A few additional error codes are defined in a later version from
|
||||
[Apple](https://opensource.apple.com/source/mDNSResponder/mDNSResponder-544/mDNSShared/dns_sd.h.auto.html).
|
||||
@@ -1572,6 +1611,9 @@ what version UxPlay claims to be.
|
||||
|
||||
# Changelog
|
||||
|
||||
1.70 2024-10-04 Add support for 4K (h265) video (resolution 3840 x
|
||||
2160). Fix issue with GStreamer \>= 1.24 when client sleeps, then wakes.
|
||||
|
||||
1.69 2024-08-09 Internal improvements (e.g. in -nohold option,
|
||||
identifying GStreamer videosink selected by autovideosink, finding X11
|
||||
display) in anticipation of future HLS video support. New -nofreeze
|
||||
|
||||
@@ -7,10 +7,10 @@ string(REGEX REPLACE "\n" ";" files "${files}")
|
||||
foreach(file ${files})
|
||||
message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
|
||||
if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
|
||||
exec_program(
|
||||
"@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
|
||||
execute_process(
|
||||
COMMAND "@CMAKE_COMMAND@" -E remove "$ENV{DESTDIR}${file}"
|
||||
OUTPUT_VARIABLE rm_out
|
||||
RETURN_VALUE rm_retval
|
||||
RESULT_VARIABLE rm_retval
|
||||
)
|
||||
if(NOT "${rm_retval}" STREQUAL 0)
|
||||
message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
|
||||
|
||||
1316
lib/llhttp/llhttp.c
1316
lib/llhttp/llhttp.c
File diff suppressed because it is too large
Load Diff
@@ -3,8 +3,8 @@
|
||||
#define INCLUDE_LLHTTP_H_
|
||||
|
||||
#define LLHTTP_VERSION_MAJOR 9
|
||||
#define LLHTTP_VERSION_MINOR 1
|
||||
#define LLHTTP_VERSION_PATCH 3
|
||||
#define LLHTTP_VERSION_MINOR 2
|
||||
#define LLHTTP_VERSION_PATCH 1
|
||||
|
||||
#ifndef INCLUDE_LLHTTP_ITSELF_H_
|
||||
#define INCLUDE_LLHTTP_ITSELF_H_
|
||||
@@ -181,7 +181,8 @@ enum llhttp_method {
|
||||
HTTP_SET_PARAMETER = 42,
|
||||
HTTP_REDIRECT = 43,
|
||||
HTTP_RECORD = 44,
|
||||
HTTP_FLUSH = 45
|
||||
HTTP_FLUSH = 45,
|
||||
HTTP_QUERY = 46
|
||||
};
|
||||
typedef enum llhttp_method llhttp_method_t;
|
||||
|
||||
@@ -362,6 +363,7 @@ typedef enum llhttp_status llhttp_status_t;
|
||||
XX(31, LINK, LINK) \
|
||||
XX(32, UNLINK, UNLINK) \
|
||||
XX(33, SOURCE, SOURCE) \
|
||||
XX(46, QUERY, QUERY) \
|
||||
|
||||
|
||||
#define RTSP_METHOD_MAP(XX) \
|
||||
@@ -428,6 +430,7 @@ typedef enum llhttp_status llhttp_status_t;
|
||||
XX(43, REDIRECT, REDIRECT) \
|
||||
XX(44, RECORD, RECORD) \
|
||||
XX(45, FLUSH, FLUSH) \
|
||||
XX(46, QUERY, QUERY) \
|
||||
|
||||
|
||||
#define HTTP_STATUS_MAP(XX) \
|
||||
@@ -547,6 +550,8 @@ extern "C" {
|
||||
|
||||
#if defined(__wasm__)
|
||||
#define LLHTTP_EXPORT __attribute__((visibility("default")))
|
||||
#elif defined(_WIN32)
|
||||
#define LLHTTP_EXPORT __declspec(dllexport)
|
||||
#else
|
||||
#define LLHTTP_EXPORT
|
||||
#endif
|
||||
|
||||
@@ -599,3 +599,8 @@ raop_stop(raop_t *raop) {
|
||||
assert(raop);
|
||||
httpd_stop(raop->httpd);
|
||||
}
|
||||
|
||||
void raop_remove_known_connections(raop_t * raop) {
|
||||
httpd_remove_known_connections(raop->httpd);
|
||||
}
|
||||
|
||||
|
||||
13
lib/raop.h
13
lib/raop.h
@@ -36,14 +36,21 @@ typedef struct raop_s raop_t;
|
||||
|
||||
typedef void (*raop_log_callback_t)(void *cls, int level, const char *msg);
|
||||
|
||||
typedef enum video_codec_e {
|
||||
VIDEO_CODEC_UNKNOWN,
|
||||
VIDEO_CODEC_H264,
|
||||
VIDEO_CODEC_H265
|
||||
} video_codec_t;
|
||||
|
||||
struct raop_callbacks_s {
|
||||
void* cls;
|
||||
|
||||
void (*audio_process)(void *cls, raop_ntp_t *ntp, audio_decode_struct *data);
|
||||
void (*video_process)(void *cls, raop_ntp_t *ntp, h264_decode_struct *data);
|
||||
void (*video_process)(void *cls, raop_ntp_t *ntp, video_decode_struct *data);
|
||||
void (*video_pause)(void *cls);
|
||||
void (*video_resume)(void *cls);
|
||||
|
||||
void (*video_codec) (void *cls, video_codec_t video_codec);
|
||||
|
||||
/* Optional but recommended callback functions */
|
||||
void (*conn_init)(void *cls);
|
||||
void (*conn_destroy)(void *cls);
|
||||
@@ -64,6 +71,7 @@ struct raop_callbacks_s {
|
||||
bool (*check_register) (void *cls, const char *pk_str);
|
||||
void (*export_dacp) (void *cls, const char *active_remote, const char *dacp_id);
|
||||
void (*video_reset) (void *cls);
|
||||
void (*video_set_codec)(void *cls, video_codec_t codec);
|
||||
};
|
||||
typedef struct raop_callbacks_s raop_callbacks_t;
|
||||
raop_ntp_t *raop_ntp_init(logger_t *logger, raop_callbacks_t *callbacks, const char *remote,
|
||||
@@ -84,6 +92,7 @@ RAOP_API int raop_is_running(raop_t *raop);
|
||||
RAOP_API void raop_stop(raop_t *raop);
|
||||
RAOP_API void raop_set_dnssd(raop_t *raop, dnssd_t *dnssd);
|
||||
RAOP_API void raop_destroy(raop_t *raop);
|
||||
RAOP_API void raop_remove_known_connections(raop_t * raop);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
|
||||
@@ -68,22 +68,47 @@ raop_handler_info(raop_conn_t *conn,
|
||||
plist_t name_node = plist_new_string(name);
|
||||
plist_dict_set_item(res_node, "name", name_node);
|
||||
|
||||
plist_t audio_latencies_node = plist_new_array();
|
||||
plist_t audio_latencies_0_node = plist_new_dict();
|
||||
plist_t audio_latencies_0_output_latency_micros_node = plist_new_bool(0);
|
||||
plist_t audio_latencies_0_type_node = plist_new_uint(100);
|
||||
plist_t audio_latencies_0_audio_type_node = plist_new_string("default");
|
||||
plist_t audio_latencies_0_input_latency_micros_node = plist_new_uint(0);
|
||||
plist_dict_set_item(audio_latencies_0_node, "type", audio_latencies_0_type_node);
|
||||
plist_dict_set_item(audio_latencies_0_node, "inputLatencyMicros", audio_latencies_0_input_latency_micros_node);
|
||||
plist_dict_set_item(audio_latencies_0_node, "audioType", audio_latencies_0_audio_type_node);
|
||||
plist_dict_set_item(audio_latencies_0_node, "outputLatencyMicros", audio_latencies_0_output_latency_micros_node);
|
||||
plist_array_append_item(audio_latencies_node, audio_latencies_0_node);
|
||||
|
||||
plist_t audio_latencies_1_node = plist_new_dict();
|
||||
plist_t audio_latencies_1_output_latency_micros_node = plist_new_bool(0);
|
||||
plist_t audio_latencies_1_type_node = plist_new_uint(101);
|
||||
plist_t audio_latencies_1_audio_type_node = plist_new_string("default");
|
||||
plist_t audio_latencies_1_input_latency_micros_node = plist_new_uint(0);
|
||||
plist_dict_set_item(audio_latencies_1_node, "type", audio_latencies_1_type_node);
|
||||
plist_dict_set_item(audio_latencies_1_node, "audioType", audio_latencies_1_audio_type_node);
|
||||
plist_dict_set_item(audio_latencies_1_node, "inputLatencyMicros", audio_latencies_1_input_latency_micros_node);
|
||||
plist_dict_set_item(audio_latencies_1_node, "outputLatencyMicros", audio_latencies_1_output_latency_micros_node);
|
||||
plist_array_append_item(audio_latencies_node, audio_latencies_1_node);
|
||||
plist_dict_set_item(res_node, "audioLatencies", audio_latencies_node);
|
||||
|
||||
plist_t audio_formats_node = plist_new_array();
|
||||
plist_t audio_format_0_node = plist_new_dict();
|
||||
plist_t audio_format_0_type_node = plist_new_uint(100);
|
||||
plist_t audio_format_0_audio_input_formats_node = plist_new_uint(0x3fffffc);
|
||||
plist_t audio_format_0_audio_output_formats_node = plist_new_uint(0x3fffffc);
|
||||
plist_dict_set_item(audio_format_0_node, "audioOutputFormats", audio_format_0_audio_output_formats_node);
|
||||
plist_dict_set_item(audio_format_0_node, "type", audio_format_0_type_node);
|
||||
plist_dict_set_item(audio_format_0_node, "audioInputFormats", audio_format_0_audio_input_formats_node);
|
||||
plist_dict_set_item(audio_format_0_node, "audioOutputFormats", audio_format_0_audio_output_formats_node);
|
||||
plist_array_append_item(audio_formats_node, audio_format_0_node);
|
||||
|
||||
plist_t audio_format_1_node = plist_new_dict();
|
||||
plist_t audio_format_1_type_node = plist_new_uint(101);
|
||||
plist_t audio_format_1_audio_input_formats_node = plist_new_uint(0x3fffffc);
|
||||
plist_t audio_format_1_audio_output_formats_node = plist_new_uint(0x3fffffc);
|
||||
plist_dict_set_item(audio_format_1_node, "audioOutputFormats", audio_format_1_audio_output_formats_node);
|
||||
plist_dict_set_item(audio_format_1_node, "type", audio_format_1_type_node);
|
||||
plist_dict_set_item(audio_format_1_node, "audioInputFormats", audio_format_1_audio_input_formats_node);
|
||||
plist_dict_set_item(audio_format_1_node, "audioOutputFormats", audio_format_1_audio_output_formats_node);
|
||||
plist_array_append_item(audio_formats_node, audio_format_1_node);
|
||||
plist_dict_set_item(res_node, "audioFormats", audio_formats_node);
|
||||
|
||||
@@ -102,32 +127,9 @@ raop_handler_info(raop_conn_t *conn,
|
||||
plist_t source_version_node = plist_new_string(GLOBAL_VERSION);
|
||||
plist_dict_set_item(res_node, "sourceVersion", source_version_node);
|
||||
|
||||
plist_t keep_alive_send_stats_as_body_node = plist_new_uint(1);
|
||||
plist_t keep_alive_send_stats_as_body_node = plist_new_bool(1);
|
||||
plist_dict_set_item(res_node, "keepAliveSendStatsAsBody", keep_alive_send_stats_as_body_node);
|
||||
|
||||
plist_t audio_latencies_node = plist_new_array();
|
||||
plist_t audio_latencies_0_node = plist_new_dict();
|
||||
plist_t audio_latencies_0_output_latency_micros_node = plist_new_bool(0);
|
||||
plist_t audio_latencies_0_type_node = plist_new_uint(100);
|
||||
plist_t audio_latencies_0_audio_type_node = plist_new_string("default");
|
||||
plist_t audio_latencies_0_input_latency_micros_node = plist_new_bool(0);
|
||||
plist_dict_set_item(audio_latencies_0_node, "outputLatencyMicros", audio_latencies_0_output_latency_micros_node);
|
||||
plist_dict_set_item(audio_latencies_0_node, "type", audio_latencies_0_type_node);
|
||||
plist_dict_set_item(audio_latencies_0_node, "audioType", audio_latencies_0_audio_type_node);
|
||||
plist_dict_set_item(audio_latencies_0_node, "inputLatencyMicros", audio_latencies_0_input_latency_micros_node);
|
||||
plist_array_append_item(audio_latencies_node, audio_latencies_0_node);
|
||||
plist_t audio_latencies_1_node = plist_new_dict();
|
||||
plist_t audio_latencies_1_output_latency_micros_node = plist_new_bool(0);
|
||||
plist_t audio_latencies_1_type_node = plist_new_uint(101);
|
||||
plist_t audio_latencies_1_audio_type_node = plist_new_string("default");
|
||||
plist_t audio_latencies_1_input_latency_micros_node = plist_new_bool(0);
|
||||
plist_dict_set_item(audio_latencies_1_node, "outputLatencyMicros", audio_latencies_1_output_latency_micros_node);
|
||||
plist_dict_set_item(audio_latencies_1_node, "type", audio_latencies_1_type_node);
|
||||
plist_dict_set_item(audio_latencies_1_node, "audioType", audio_latencies_1_audio_type_node);
|
||||
plist_dict_set_item(audio_latencies_1_node, "inputLatencyMicros", audio_latencies_1_input_latency_micros_node);
|
||||
plist_array_append_item(audio_latencies_node, audio_latencies_1_node);
|
||||
plist_dict_set_item(res_node, "audioLatencies", audio_latencies_node);
|
||||
|
||||
plist_t model_node = plist_new_string(GLOBAL_MODEL);
|
||||
plist_dict_set_item(res_node, "model", model_node);
|
||||
|
||||
@@ -136,15 +138,15 @@ raop_handler_info(raop_conn_t *conn,
|
||||
|
||||
plist_t displays_node = plist_new_array();
|
||||
plist_t displays_0_node = plist_new_dict();
|
||||
plist_t displays_0_width_physical_node = plist_new_uint(0);
|
||||
plist_t displays_0_height_physical_node = plist_new_uint(0);
|
||||
plist_t displays_0_uuid_node = plist_new_string("e0ff8a27-6738-3d56-8a16-cc53aacee925");
|
||||
plist_t displays_0_width_physical_node = plist_new_bool(0);
|
||||
plist_t displays_0_height_physical_node = plist_new_bool(0);
|
||||
plist_t displays_0_width_node = plist_new_uint(conn->raop->width);
|
||||
plist_t displays_0_height_node = plist_new_uint(conn->raop->height);
|
||||
plist_t displays_0_width_pixels_node = plist_new_uint(conn->raop->width);
|
||||
plist_t displays_0_height_pixels_node = plist_new_uint(conn->raop->height);
|
||||
plist_t displays_0_rotation_node = plist_new_bool(0);
|
||||
plist_t displays_0_refresh_rate_node = plist_new_uint(conn->raop->refreshRate);
|
||||
plist_t displays_0_rotation_node = plist_new_bool(0); /* set to true in AppleTV gen 3 (which has features bit 8 set */
|
||||
plist_t displays_0_refresh_rate_node = plist_new_real((double) 1.0 / conn->raop->refreshRate); /* set as real 0.166666 = 60hz in AppleTV gen 3 */
|
||||
plist_t displays_0_max_fps_node = plist_new_uint(conn->raop->maxFPS);
|
||||
plist_t displays_0_overscanned_node = plist_new_bool(conn->raop->overscanned);
|
||||
plist_t displays_0_features = plist_new_uint(14);
|
||||
@@ -156,7 +158,7 @@ raop_handler_info(raop_conn_t *conn,
|
||||
plist_dict_set_item(displays_0_node, "height", displays_0_height_node);
|
||||
plist_dict_set_item(displays_0_node, "widthPixels", displays_0_width_pixels_node);
|
||||
plist_dict_set_item(displays_0_node, "heightPixels", displays_0_height_pixels_node);
|
||||
plist_dict_set_item(displays_0_node, "rotation", displays_0_rotation_node);
|
||||
plist_dict_set_item(displays_0_node, "rotation", displays_0_rotation_node);
|
||||
plist_dict_set_item(displays_0_node, "refreshRate", displays_0_refresh_rate_node);
|
||||
plist_dict_set_item(displays_0_node, "maxFPS", displays_0_max_fps_node);
|
||||
plist_dict_set_item(displays_0_node, "overscanned", displays_0_overscanned_node);
|
||||
|
||||
@@ -195,8 +195,13 @@ raop_rtp_mirror_thread(void *arg)
|
||||
uint64_t ntp_timestamp_local = 0;
|
||||
unsigned char nal_start_code[4] = { 0x00, 0x00, 0x00, 0x01 };
|
||||
bool logger_debug = (logger_get_level(raop_rtp_mirror->logger) >= LOGGER_DEBUG);
|
||||
bool h265_video_detected = false;
|
||||
|
||||
bool h265_video = false;
|
||||
video_codec_t codec;
|
||||
const char h264[] = "h264";
|
||||
const char h265[] = "h265";
|
||||
bool unsupported_codec = false;
|
||||
bool video_stream_suspended = false;
|
||||
|
||||
while (1) {
|
||||
fd_set rfds;
|
||||
struct timeval tv;
|
||||
@@ -320,14 +325,16 @@ raop_rtp_mirror_thread(void *arg)
|
||||
* 0x00 0x00: encrypted packet containing a non-IDR type 1 VCL NAL unit *
|
||||
* 0x00 0x10: encrypted packet containing an IDR type 5 VCL NAL unit *
|
||||
* 0x01 0x00: unencrypted packet containing a type 7 SPS NAL + a type 8 PPS NAL unit *
|
||||
* 0x02 0x00: unencrypted packet (old protocol) no payload, sent once every second *
|
||||
* 0x02 0x00: unencrypted packet (old protocol) no payload, sent once every second *
|
||||
* 0x05 0x00 unencrypted packet with a "streaming report", sent once per second. */
|
||||
|
||||
/* packet[6] + packet[7] may list a payload "option": values seen are: *
|
||||
* 0x00 0x00 : encrypted and "streaming report" packets *
|
||||
* 0x1e 0x00 : old protocol (seen in AirMyPC) no-payload once-per-second packets *
|
||||
* 0x16 0x01 : seen in most unencrypted SPS+PPS packets *
|
||||
* 0x56 0x01 : occasionally seen in unencrypted SPS+PPS packets (why different?) */
|
||||
* 0x16 0x01 : seen in most unencrypted h264 SPS+PPS packets *
|
||||
* 0x56 0x01 : unencrypted h264 SPS+PPS packets (video stream stops, client sleeps) *
|
||||
* 0x1e 0x01 : unencrypted h265/HEVC SPS+PPS packets
|
||||
* 0x5e 0x01 : unencrypted h265 SPS+PPS packets (video stream stops, client sleeps) */
|
||||
|
||||
/* unencrypted packets with a SPS and a PPS NAL are sent initially, and also when a *
|
||||
* change in video format (e.g. width, height) subsequently occurs. They seem always *
|
||||
@@ -377,9 +384,9 @@ raop_rtp_mirror_thread(void *arg)
|
||||
uint64_t ntp_now = raop_ntp_get_local_time(raop_rtp_mirror->ntp);
|
||||
int64_t latency = ((int64_t) ntp_now) - ((int64_t) ntp_timestamp_local);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp video: now = %8.6f, ntp = %8.6f, latency = %8.6f, ts = %8.6f, %s",
|
||||
"raop_rtp video: now = %8.6f, ntp = %8.6f, latency = %8.6f, ts = %8.6f, %s %s",
|
||||
(double) ntp_now / SEC, (double) ntp_timestamp_local / SEC, (double) latency / SEC,
|
||||
(double) ntp_timestamp_remote / SEC, packet_description);
|
||||
(double) ntp_timestamp_remote / SEC, packet_description, h265_video ? h265 : h264);
|
||||
}
|
||||
|
||||
unsigned char* payload_out;
|
||||
@@ -442,178 +449,283 @@ raop_rtp_mirror_thread(void *arg)
|
||||
valid_data = false;
|
||||
break;
|
||||
}
|
||||
int nalu_type = payload_decrypted[nalu_size] & 0x1f;
|
||||
int ref_idc = (payload_decrypted[nalu_size] >> 5);
|
||||
/* check for unsupported h265 video (sometimes sent by macOS in high-def screen mirroring) */
|
||||
if (payload_decrypted[nalu_size + 1] == 0x01) {
|
||||
switch (payload_decrypted[nalu_size]) {
|
||||
case 0x28: // h265 IDR type 20 NAL
|
||||
case 0x02: // h265 non-IDR type 1 NAL
|
||||
ref_idc = 0;
|
||||
h265_video_detected = true;
|
||||
int nalu_type;
|
||||
if (h265_video) {
|
||||
nalu_type = payload_decrypted[nalu_size] & 0x7e >> 1;;
|
||||
//logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG," h265 video, NALU type %d, size %d", nalu_type, nc_len);
|
||||
} else {
|
||||
nalu_type = payload_decrypted[nalu_size] & 0x1f;
|
||||
int ref_idc = (payload_decrypted[nalu_size] >> 5);
|
||||
switch (nalu_type) {
|
||||
case 14: /* Prefix NALu , seen before all VCL Nalu's in AirMyPc */
|
||||
case 5: /*IDR, slice_layer_without_partitioning */
|
||||
case 1: /*non-IDR, slice_layer_without_partitioning */
|
||||
break;
|
||||
case 2: /* slice data partition A */
|
||||
case 3: /* slice data partition B */
|
||||
case 4: /* slice data partition C */
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
|
||||
"unexpected partitioned VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
|
||||
"processed bytes %d, payloadsize = %d nalus_count = %d",
|
||||
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
|
||||
break;
|
||||
case 6:
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SEI NAL size = %d", nc_len);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp_mirror h264 Supplemental Enhancement Information:\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
break;
|
||||
case 7:
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", nc_len);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
break;
|
||||
case 8:
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", nc_len);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp_mirror h264 Picture Parameter Set :\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (h265_video_detected) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
switch (nalu_type) {
|
||||
case 14: /* Prefix NALu , seen before all VCL Nalu's in AirMyPc */
|
||||
case 5: /*IDR, slice_layer_without_partitioning */
|
||||
case 1: /*non-IDR, slice_layer_without_partitioning */
|
||||
break;
|
||||
case 2: /* slice data partition A */
|
||||
case 3: /* slice data partition B */
|
||||
case 4: /* slice data partition C */
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
|
||||
"unexpected partitioned VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
|
||||
"processed bytes %d, payloadsize = %d nalus_count = %d",
|
||||
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
|
||||
break;
|
||||
case 6:
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SEI NAL size = %d", nc_len);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp_mirror h264 Supplemental Enhancement Information:\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
break;
|
||||
case 7:
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", nc_len);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
break;
|
||||
case 8:
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", nc_len);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
|
||||
"raop_rtp_mirror h264 Picture Parameter Set :\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
|
||||
"unexpected non-VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
|
||||
"processed bytes %d, payloadsize = %d nalus_count = %d",
|
||||
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
|
||||
break;
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
|
||||
"unexpected non-VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
|
||||
"processed bytes %d, payloadsize = %d nalus_count = %d",
|
||||
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
|
||||
break;
|
||||
}
|
||||
}
|
||||
nalu_size += nc_len;
|
||||
}
|
||||
if (h265_video_detected) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR,
|
||||
"unsupported h265 video detected");
|
||||
free (payload_out);
|
||||
break;
|
||||
}
|
||||
if (nalu_size != payload_size) valid_data = false;
|
||||
if(!valid_data) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "nalu marked as invalid");
|
||||
payload_out[0] = 1; /* mark video data as invalid h264 (failed decryption) */
|
||||
}
|
||||
|
||||
|
||||
payload_decrypted = NULL;
|
||||
h264_decode_struct h264_data;
|
||||
h264_data.ntp_time_local = ntp_timestamp_local;
|
||||
h264_data.ntp_time_remote = ntp_timestamp_remote;
|
||||
h264_data.nal_count = nalus_count; /*nal_count will be the number of nal units in the packet */
|
||||
h264_data.data_len = payload_size;
|
||||
h264_data.data = payload_out;
|
||||
video_decode_struct video_data;
|
||||
video_data.is_h265 = h265_video;
|
||||
video_data.ntp_time_local = ntp_timestamp_local;
|
||||
video_data.ntp_time_remote = ntp_timestamp_remote;
|
||||
video_data.nal_count = nalus_count; /*nal_count will be the number of nal units in the packet */
|
||||
video_data.data_len = payload_size;
|
||||
video_data.data = payload_out;
|
||||
if (prepend_sps_pps) {
|
||||
h264_data.data_len += sps_pps_len;
|
||||
h264_data.nal_count += 2;
|
||||
prepend_sps_pps = false;
|
||||
video_data.data_len += sps_pps_len;
|
||||
video_data.nal_count += 2;
|
||||
if (h265_video) {
|
||||
video_data.nal_count++;
|
||||
}
|
||||
prepend_sps_pps = false;
|
||||
}
|
||||
raop_rtp_mirror->callbacks.video_resume(raop_rtp_mirror->callbacks.cls);
|
||||
raop_rtp_mirror->callbacks.video_process(raop_rtp_mirror->callbacks.cls, raop_rtp_mirror->ntp, &h264_data);
|
||||
|
||||
raop_rtp_mirror->callbacks.video_process(raop_rtp_mirror->callbacks.cls, raop_rtp_mirror->ntp, &video_data);
|
||||
free(payload_out);
|
||||
break;
|
||||
//char *str3 = utils_data_to_string(payload_out, video_data.data_len, 16);
|
||||
//printf("%s\n", str3);
|
||||
//free (str3);
|
||||
case 0x01:
|
||||
/* 128-byte observed packet header structure
|
||||
bytes 0-15: length + timestamp
|
||||
bytes 16-19 float width_source (value is x.0000, x = unsigned short)
|
||||
bytes 20-23 float height_source (value is x.0000, x = unsigned short)
|
||||
bytes 24-39 all 0x0
|
||||
bytes 40-43 float width_source (value is x.0000, x = unsigned short)
|
||||
bytes 44-47 float height_source (value is x.0000, x = unsigned short)
|
||||
bytes 48-51 ??? float "other_w" (value seems to be x.0000, x = unsigned short)
|
||||
bytes 48-51 ??? float "other_h" (value seems to be x.0000, x = unsigned short)
|
||||
bytes 56-59 width
|
||||
bytes 60-63 height
|
||||
bytes 64-127 all 0x0
|
||||
*/
|
||||
|
||||
// The information in the payload contains an SPS and a PPS NAL
|
||||
// The sps_pps is not encrypted
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived unencrypted codec packet from client:"
|
||||
" payload_size %d header %s ts_client = %8.6f",
|
||||
payload_size, packet_description, (double) ntp_timestamp_remote / SEC);
|
||||
if (payload_size == 0) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror, discard type 0x01 packet with no payload");
|
||||
break;
|
||||
|
||||
if (!video_stream_suspended && (packet[6] == 0x56 || packet[6] == 0x5e)) {
|
||||
video_stream_suspended = true;
|
||||
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
|
||||
} else if (video_stream_suspended && (packet[6] == 0x16 || packet[6] == 0x1e)) {
|
||||
raop_rtp_mirror->callbacks.video_resume(raop_rtp_mirror->callbacks.cls);
|
||||
video_stream_suspended = false;
|
||||
}
|
||||
|
||||
codec = VIDEO_CODEC_UNKNOWN;
|
||||
assert (raop_rtp_mirror->callbacks.video_set_codec);
|
||||
ntp_timestamp_nal = ntp_timestamp_raw;
|
||||
float width = byteutils_get_float(packet, 16);
|
||||
float height = byteutils_get_float(packet, 20);
|
||||
float width_source = byteutils_get_float(packet, 40);
|
||||
float height_source = byteutils_get_float(packet, 44);
|
||||
if (width != width_source || height != height_source) {
|
||||
|
||||
/* these "floats" are in fact integers that fit into unsigned shorts */
|
||||
float width_0 = byteutils_get_float(packet, 16);
|
||||
float height_0 = byteutils_get_float(packet, 20);
|
||||
float width_source = byteutils_get_float(packet, 40); // duplication of width_0
|
||||
float height_source = byteutils_get_float(packet, 44); // duplication of height_0
|
||||
float unknown_w = byteutils_get_float(packet, 48);
|
||||
float unknown_h = byteutils_get_float(packet, 52);
|
||||
float width = byteutils_get_float(packet, 56);
|
||||
float height = byteutils_get_float(packet, 60);
|
||||
|
||||
if (width != width_0 || height != height_0) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: Unexpected : data %f,"
|
||||
" %f != width_source = %f, height_source = %f", width, height, width_source, height_source);
|
||||
" %f != width_source = %f, height_source = %f", width_0, height_0, width_source, height_source);
|
||||
}
|
||||
width = byteutils_get_float(packet, 48);
|
||||
height = byteutils_get_float(packet, 52);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: unidentified extra header data %f, %f", width, height);
|
||||
width = byteutils_get_float(packet, 56);
|
||||
height = byteutils_get_float(packet, 60);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: unidentified extra header data %f, %f", unknown_w, unknown_h);
|
||||
if (raop_rtp_mirror->callbacks.video_report_size) {
|
||||
raop_rtp_mirror->callbacks.video_report_size(raop_rtp_mirror->callbacks.cls, &width_source, &height_source, &width, &height);
|
||||
}
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror width_source = %f height_source = %f width = %f height = %f",
|
||||
width_source, height_source, width, height);
|
||||
|
||||
short sps_size = byteutils_get_short_be(payload,6);
|
||||
unsigned char *sequence_parameter_set = payload + 8;
|
||||
short pps_size = byteutils_get_short_be(payload, sps_size + 9);
|
||||
unsigned char *picture_parameter_set = payload + sps_size + 11;
|
||||
int data_size = 6;
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload, data_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: SPS+PPS header size = %d", data_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 SPS+PPS header:\n%s", str);
|
||||
free(str);
|
||||
str = utils_data_to_string(sequence_parameter_set, sps_size,16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", sps_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
|
||||
free(str);
|
||||
str = utils_data_to_string(picture_parameter_set, pps_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", pps_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Picture Parameter Set:\n%s", str);
|
||||
free(str);
|
||||
if (payload_size == 0) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "raop_rtp_mirror: received type 0x01 packet with no payload:\n"
|
||||
"this indicates non-h264 video but Airplay features bit 42 (SupportsScreenMultiCodec) is not set\n"
|
||||
"use startup option \"-h265\" to set this bit and support h265 (4K) video");
|
||||
unsupported_codec = true;
|
||||
break;
|
||||
}
|
||||
data_size = payload_size - sps_size - pps_size - 11;
|
||||
if (data_size > 0 && logger_debug) {
|
||||
char *str = utils_data_to_string (picture_parameter_set + pps_size, data_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder size = %d", data_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder of SPS+PPS packet:\n%s", str);
|
||||
free(str);
|
||||
} else if (data_size < 0) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, " pps_sps error: packet remainder size = %d < 0", data_size);
|
||||
}
|
||||
|
||||
// Copy the sps and pps into a buffer to prepend to the next NAL unit.
|
||||
if (sps_pps) {
|
||||
free(sps_pps);
|
||||
sps_pps = NULL;
|
||||
}
|
||||
sps_pps_len = sps_size + pps_size + 8;
|
||||
sps_pps = (unsigned char*) malloc(sps_pps_len);
|
||||
assert(sps_pps);
|
||||
memcpy(sps_pps, nal_start_code, 4);
|
||||
memcpy(sps_pps + 4, sequence_parameter_set, sps_size);
|
||||
memcpy(sps_pps + sps_size + 4, nal_start_code, 4);
|
||||
memcpy(sps_pps + sps_size + 8, payload + sps_size + 11, pps_size);
|
||||
prepend_sps_pps = true;
|
||||
/* test for a H265 VPS/SPs/PPS */
|
||||
unsigned char hvc1[] = { 0x68, 0x76, 0x63, 0x31 };
|
||||
|
||||
uint64_t ntp_offset = 0;
|
||||
ntp_offset = raop_ntp_convert_remote_time(raop_rtp_mirror->ntp, ntp_offset);
|
||||
if (!ntp_offset) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_WARNING, "ntp synchronization has not yet started: synchronized video may fail");
|
||||
if (!memcmp(payload + 4, hvc1, 4)) {
|
||||
/* hvc1 HECV detected */
|
||||
codec = VIDEO_CODEC_H265;
|
||||
printf("h265 detected\n");
|
||||
h265_video = true;
|
||||
raop_rtp_mirror->callbacks.video_set_codec(raop_rtp_mirror->callbacks.cls, codec);
|
||||
unsigned char vps_start_code[] = { 0xa0, 0x00, 0x01, 0x00 };
|
||||
unsigned char sps_start_code[] = { 0xa1, 0x00, 0x01, 0x00 };
|
||||
unsigned char pps_start_code[] = { 0xa2, 0x00, 0x01, 0x00 };
|
||||
unsigned char *vps;
|
||||
short vps_size;
|
||||
unsigned char *sps;
|
||||
short sps_size;
|
||||
unsigned char *pps;
|
||||
short pps_size;
|
||||
|
||||
unsigned char * ptr = payload + 0x75;
|
||||
|
||||
if (memcmp(ptr, vps_start_code, 4)) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "non-conforming HEVC VPS/SPS/PPS payload (VPS)");
|
||||
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
|
||||
break;
|
||||
}
|
||||
vps_size = byteutils_get_short_be(ptr, 3);
|
||||
ptr += 5;
|
||||
vps = ptr;
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(vps, vps_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "h265 vps size %d\n%s",vps_size, str);
|
||||
free(str);
|
||||
}
|
||||
ptr += vps_size;
|
||||
if (memcmp(ptr, sps_start_code, 4)) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "non-conforming HEVC VPS/SPS/PPS payload (SPS)");
|
||||
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
|
||||
break;
|
||||
}
|
||||
sps_size = byteutils_get_short_be(ptr, 3);
|
||||
ptr += 5;
|
||||
sps = ptr;
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(sps, sps_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "h265 sps size %d\n%s",vps_size, str);
|
||||
free(str);
|
||||
}
|
||||
ptr += sps_size;
|
||||
if (memcmp(ptr, pps_start_code, 4)) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "non-conforming HEVC VPS/SPS/PPS payload (PPS)");
|
||||
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
|
||||
break;
|
||||
}
|
||||
pps_size = byteutils_get_short_be(ptr, 3);
|
||||
ptr += 5;
|
||||
pps = ptr;
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(pps, pps_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "h265 pps size %d\n%s",pps_size, str);
|
||||
free(str);
|
||||
}
|
||||
|
||||
sps_pps_len = vps_size + sps_size + pps_size + 12;
|
||||
sps_pps = (unsigned char*) malloc(sps_pps_len);
|
||||
assert(sps_pps);
|
||||
ptr = sps_pps;
|
||||
memcpy(ptr, nal_start_code, 4);
|
||||
ptr += 4;
|
||||
memcpy(ptr, vps, vps_size);
|
||||
ptr += vps_size;
|
||||
memcpy(ptr, nal_start_code, 4);
|
||||
ptr += 4;
|
||||
memcpy(ptr, sps, sps_size);
|
||||
ptr += sps_size;
|
||||
memcpy(ptr, nal_start_code, 4);
|
||||
ptr += 4;
|
||||
memcpy(ptr, pps, pps_size);
|
||||
// printf (" HEVC (hvc1) vps + sps + pps NALU\n");
|
||||
//char *str = utils_data_to_string(sps_pps, sps_pps_len, 16);
|
||||
//printf("%s\n", str);
|
||||
//free (str);
|
||||
} else {
|
||||
codec = VIDEO_CODEC_H264;
|
||||
h265_video = false;
|
||||
raop_rtp_mirror->callbacks.video_set_codec(raop_rtp_mirror->callbacks.cls, codec);
|
||||
short sps_size = byteutils_get_short_be(payload,6);
|
||||
unsigned char *sequence_parameter_set = payload + 8;
|
||||
short pps_size = byteutils_get_short_be(payload, sps_size + 9);
|
||||
unsigned char *picture_parameter_set = payload + sps_size + 11;
|
||||
int data_size = 6;
|
||||
if (logger_debug) {
|
||||
char *str = utils_data_to_string(payload, data_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: SPS+PPS header size = %d", data_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 SPS+PPS header:\n%s", str);
|
||||
free(str);
|
||||
str = utils_data_to_string(sequence_parameter_set, sps_size,16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", sps_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
|
||||
free(str);
|
||||
str = utils_data_to_string(picture_parameter_set, pps_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", pps_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Picture Parameter Set:\n%s", str);
|
||||
free(str);
|
||||
}
|
||||
data_size = payload_size - sps_size - pps_size - 11;
|
||||
if (data_size > 0 && logger_debug) {
|
||||
char *str = utils_data_to_string (picture_parameter_set + pps_size, data_size, 16);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder size = %d", data_size);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder of SPS+PPS packet:\n%s", str);
|
||||
free(str);
|
||||
} else if (data_size < 0) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, " pps_sps error: packet remainder size = %d < 0", data_size);
|
||||
}
|
||||
|
||||
// Copy the sps and pps into a buffer to prepend to the next NAL unit.
|
||||
sps_pps_len = sps_size + pps_size + 8;
|
||||
sps_pps = (unsigned char*) malloc(sps_pps_len);
|
||||
assert(sps_pps);
|
||||
memcpy(sps_pps, nal_start_code, 4);
|
||||
memcpy(sps_pps + 4, sequence_parameter_set, sps_size);
|
||||
memcpy(sps_pps + sps_size + 4, nal_start_code, 4);
|
||||
memcpy(sps_pps + sps_size + 8, payload + sps_size + 11, pps_size);
|
||||
}
|
||||
prepend_sps_pps = true;
|
||||
// h264codec_t h264;
|
||||
// h264.version = payload[0];
|
||||
// h264.profile_high = payload[1];
|
||||
@@ -628,7 +740,6 @@ raop_rtp_mirror_thread(void *arg)
|
||||
// h264.pps_size = pps_size;
|
||||
// h264.picture_parameter_set = malloc(h264.pps_size);
|
||||
// memcpy(h264.picture_parameter_set, picture_parameter_set, pps_size);
|
||||
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
|
||||
break;
|
||||
case 0x02:
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived old-protocol once-per-second packet from client:"
|
||||
@@ -677,9 +788,11 @@ raop_rtp_mirror_thread(void *arg)
|
||||
payload = NULL;
|
||||
memset(packet, 0, 128);
|
||||
readstart = 0;
|
||||
if (unsupported_codec) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Close the stream file descriptor */
|
||||
if (stream_fd != -1) {
|
||||
closesocket(stream_fd);
|
||||
@@ -695,6 +808,13 @@ raop_rtp_mirror_thread(void *arg)
|
||||
const bool video_reset = false; /* leave "frozen video" showing */
|
||||
raop_rtp_mirror->callbacks.conn_reset(raop_rtp_mirror->callbacks.cls, 0, video_reset);
|
||||
}
|
||||
|
||||
if (unsupported_codec) {
|
||||
closesocket(raop_rtp_mirror->mirror_data_sock);
|
||||
raop_rtp_mirror_stop(raop_rtp_mirror);
|
||||
raop_rtp_mirror->callbacks.video_reset(raop_rtp_mirror->callbacks.cls);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
@@ -22,12 +22,13 @@
|
||||
#include <stdbool.h>
|
||||
|
||||
typedef struct {
|
||||
bool is_h265;
|
||||
int nal_count;
|
||||
unsigned char *data;
|
||||
int data_len;
|
||||
uint64_t ntp_time_local;
|
||||
uint64_t ntp_time_remote;
|
||||
} h264_decode_struct;
|
||||
} video_decode_struct;
|
||||
|
||||
typedef struct {
|
||||
unsigned char *data;
|
||||
|
||||
@@ -38,17 +38,10 @@ pkg_check_modules(GST REQUIRED gstreamer-1.0>=1.4
|
||||
gstreamer-app-1.0>=1.4
|
||||
)
|
||||
|
||||
# temporary hack to deal with an issue in gstreamer 1.24
|
||||
pkg_check_modules ( GST124 gstreamer-1.0>=1.24 )
|
||||
if ( GST124_FOUND )
|
||||
message( STATUS "*** GStreamer >= 1.24: GST_124 will be defined" )
|
||||
set( GST_124 "1" CACHE STRING "define GST_124" )
|
||||
endif()
|
||||
|
||||
add_library( renderers
|
||||
STATIC
|
||||
audio_renderer_gstreamer.c
|
||||
video_renderer_gstreamer.c )
|
||||
audio_renderer.c
|
||||
video_renderer.c )
|
||||
|
||||
target_link_libraries ( renderers PUBLIC airplay )
|
||||
|
||||
@@ -63,6 +56,15 @@ if( GST_INCLUDE_DIRS MATCHES "/Library/FrameWorks/GStreamer.framework/include" )
|
||||
set( GST_MACOS "1" CACHE STRING "define GST_MACOS in uxplay.cpp" )
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# set GST_MACOS for all Apple when GStreamer >= 1.24
|
||||
if ( APPLE AND NOT GST_MACOS )
|
||||
pkg_check_modules ( GST124 gstreamer-1.0>=1.24 )
|
||||
if ( GST124_FOUND )
|
||||
set( GST_MACOS "1" CACHE STRING "define GST_MACOS in uxplay.cpp" )
|
||||
endif()
|
||||
endif()
|
||||
|
||||
target_include_directories ( renderers PUBLIC ${GST_INCLUDE_DIRS} )
|
||||
|
||||
if( GST_LIBRARY_DIRS MATCHES "/Library/FrameWorks/GStreamer.framework/lib" )
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
* Copyright (C) 2021-24 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
@@ -20,9 +20,10 @@
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
#include "video_renderer.h"
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
#include "video_renderer.h"
|
||||
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
#ifdef X_DISPLAY_FIX
|
||||
@@ -33,27 +34,37 @@ static bool alt_keypress = false;
|
||||
static unsigned char X11_search_attempts;
|
||||
#endif
|
||||
|
||||
static video_renderer_t *renderer = NULL;
|
||||
static GstClockTime gst_video_pipeline_base_time = GST_CLOCK_TIME_NONE;
|
||||
static logger_t *logger = NULL;
|
||||
static unsigned short width, height, width_source, height_source; /* not currently used */
|
||||
static bool first_packet = false;
|
||||
static bool sync = false;
|
||||
static bool auto_videosink;
|
||||
#ifdef X_DISPLAY_FIX
|
||||
static bool use_x11 = false;
|
||||
#endif
|
||||
static bool auto_videosink = true;
|
||||
static bool logger_debug = false;
|
||||
static bool video_terminate = false;
|
||||
static user_data_t user_data;
|
||||
|
||||
#define NCODECS 2 /* renderers for h264 and h265 */
|
||||
|
||||
struct video_renderer_s {
|
||||
GstElement *appsrc, *pipeline;
|
||||
GstBus *bus;
|
||||
const char *codec;
|
||||
bool autovideo, state_pending;
|
||||
int id;
|
||||
#ifdef X_DISPLAY_FIX
|
||||
bool use_x11;
|
||||
const char * server_name;
|
||||
X11_Window_t * gst_window;
|
||||
#endif
|
||||
};
|
||||
|
||||
static video_renderer_t *renderer = NULL;
|
||||
static video_renderer_t *renderer_type[NCODECS] = {0};
|
||||
static int n_renderers = NCODECS;
|
||||
static char h264[] = "h264";
|
||||
static char h265[] = "h265";
|
||||
|
||||
static void append_videoflip (GString *launch, const videoflip_t *flip, const videoflip_t *rot) {
|
||||
/* videoflip image transform */
|
||||
switch (*flip) {
|
||||
@@ -122,6 +133,7 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
|
||||
* range = 2 -> GST_VIDEO_COLOR_RANGE_16_235 ("limited RGB") */
|
||||
|
||||
static const char h264_caps[]="video/x-h264,stream-format=(string)byte-stream,alignment=(string)au";
|
||||
static const char h265_caps[]="video/x-h265,stream-format=(string)byte-stream,alignment=(string)au";
|
||||
|
||||
void video_renderer_size(float *f_width_source, float *f_height_source, float *f_width, float *f_height) {
|
||||
width_source = (unsigned short) *f_width_source;
|
||||
@@ -131,90 +143,131 @@ void video_renderer_size(float *f_width_source, float *f_height_source, float *f
|
||||
logger_log(logger, LOGGER_DEBUG, "begin video stream wxh = %dx%d; source %dx%d", width, height, width_source, height_source);
|
||||
}
|
||||
|
||||
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const bool *initial_fullscreen,
|
||||
const bool *video_sync) {
|
||||
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
|
||||
bool initial_fullscreen, bool video_sync, bool h265_support) {
|
||||
GError *error = NULL;
|
||||
GstCaps *caps = NULL;
|
||||
GstClock *clock = gst_system_clock_obtain();
|
||||
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
|
||||
|
||||
|
||||
/* videosink choices that are auto */
|
||||
auto_videosink = (strstr(videosink, "autovideosink") || strstr(videosink, "fpsdisplaysink"));
|
||||
|
||||
logger = render_logger;
|
||||
logger_debug = (logger_get_level(logger) >= LOGGER_DEBUG);
|
||||
video_terminate = false;
|
||||
|
||||
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
|
||||
/* (instead of the program name uxplay taken from (argv[0]). It is only set one time. */
|
||||
|
||||
const gchar *appname = g_get_application_name();
|
||||
if (!appname || strcmp(appname,server_name)) g_set_application_name(server_name);
|
||||
appname = NULL;
|
||||
|
||||
renderer = calloc(1, sizeof(video_renderer_t));
|
||||
g_assert(renderer);
|
||||
|
||||
GString *launch = g_string_new("appsrc name=video_source ! ");
|
||||
g_string_append(launch, "queue ! ");
|
||||
g_string_append(launch, parser);
|
||||
g_string_append(launch, " ! ");
|
||||
g_string_append(launch, decoder);
|
||||
g_string_append(launch, " ! ");
|
||||
append_videoflip(launch, &videoflip[0], &videoflip[1]);
|
||||
g_string_append(launch, converter);
|
||||
g_string_append(launch, " ! ");
|
||||
g_string_append(launch, "videoscale ! ");
|
||||
g_string_append(launch, videosink);
|
||||
if (*video_sync) {
|
||||
g_string_append(launch, " sync=true");
|
||||
sync = true;
|
||||
} else {
|
||||
g_string_append(launch, " sync=false");
|
||||
sync = false;
|
||||
}
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline will be:\n\"%s\"", launch->str);
|
||||
renderer->pipeline = gst_parse_launch(launch->str, &error);
|
||||
if (error) {
|
||||
g_error ("get_parse_launch error (video) :\n %s\n",error->message);
|
||||
g_clear_error (&error);
|
||||
}
|
||||
g_assert (renderer->pipeline);
|
||||
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer->pipeline), clock);
|
||||
|
||||
renderer->appsrc = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_source");
|
||||
g_assert(renderer->appsrc);
|
||||
caps = gst_caps_from_string(h264_caps);
|
||||
g_object_set(renderer->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
|
||||
g_string_free(launch, TRUE);
|
||||
gst_caps_unref(caps);
|
||||
gst_object_unref(clock);
|
||||
|
||||
#ifdef X_DISPLAY_FIX
|
||||
use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
|
||||
fullscreen = *initial_fullscreen;
|
||||
renderer->server_name = server_name;
|
||||
renderer->gst_window = NULL;
|
||||
X11_search_attempts = 0;
|
||||
if (use_x11) {
|
||||
renderer->gst_window = calloc(1, sizeof(X11_Window_t));
|
||||
g_assert(renderer->gst_window);
|
||||
get_X11_Display(renderer->gst_window);
|
||||
if (!renderer->gst_window->display) {
|
||||
free(renderer->gst_window);
|
||||
renderer->gst_window = NULL;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_READY);
|
||||
GstState state;
|
||||
if (gst_element_get_state (renderer->pipeline, &state, NULL, 0)) {
|
||||
if (state == GST_STATE_READY) {
|
||||
logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer");
|
||||
|
||||
n_renderers = h265_support ? 2 : 1;
|
||||
g_assert (n_renderers <= NCODECS);
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
g_assert (i < 2);
|
||||
renderer_type[i] = (video_renderer_t *) calloc(1, sizeof(video_renderer_t));
|
||||
g_assert(renderer_type[i]);
|
||||
renderer_type[i]->autovideo = auto_videosink;
|
||||
renderer_type[i]->id = i;
|
||||
renderer_type[i]->bus = NULL;
|
||||
switch (i) {
|
||||
case 0:
|
||||
renderer_type[i]->codec = h264;
|
||||
caps = gst_caps_from_string(h264_caps);
|
||||
break;
|
||||
case 1:
|
||||
renderer_type[i]->codec = h265;
|
||||
caps = gst_caps_from_string(h265_caps);
|
||||
break;
|
||||
default:
|
||||
g_assert(0);
|
||||
}
|
||||
GString *launch = g_string_new("appsrc name=video_source ! ");
|
||||
g_string_append(launch, "queue ! ");
|
||||
g_string_append(launch, parser);
|
||||
g_string_append(launch, " ! ");
|
||||
g_string_append(launch, decoder);
|
||||
g_string_append(launch, " ! ");
|
||||
append_videoflip(launch, &videoflip[0], &videoflip[1]);
|
||||
g_string_append(launch, converter);
|
||||
g_string_append(launch, " ! ");
|
||||
g_string_append(launch, "videoscale ! ");
|
||||
g_string_append(launch, videosink);
|
||||
g_string_append(launch, " name=");
|
||||
g_string_append(launch, videosink);
|
||||
g_string_append(launch, "_");
|
||||
g_string_append(launch, renderer_type[i]->codec);
|
||||
g_string_append(launch, videosink_options);
|
||||
if (video_sync) {
|
||||
g_string_append(launch, " sync=true");
|
||||
sync = true;
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer");
|
||||
g_string_append(launch, " sync=false");
|
||||
sync = false;
|
||||
}
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer");
|
||||
|
||||
if (!strcmp(renderer_type[i]->codec, h265)) {
|
||||
g_string_replace (launch, (const gchar *) h264, (const gchar *) h265, 0);
|
||||
} else {
|
||||
g_string_replace (launch, (const gchar *) h265, (const gchar *) h264, 0);
|
||||
}
|
||||
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline %d:\n\"%s\"", i + 1, launch->str);
|
||||
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
|
||||
if (error) {
|
||||
g_error ("get_parse_launch error (video) :\n %s\n",error->message);
|
||||
g_clear_error (&error);
|
||||
}
|
||||
g_assert (renderer_type[i]->pipeline);
|
||||
|
||||
GstClock *clock = gst_system_clock_obtain();
|
||||
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
|
||||
|
||||
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
|
||||
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "video_source");
|
||||
g_assert(renderer_type[i]->appsrc);
|
||||
|
||||
g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
|
||||
g_string_free(launch, TRUE);
|
||||
gst_caps_unref(caps);
|
||||
gst_object_unref(clock);
|
||||
#ifdef X_DISPLAY_FIX
|
||||
bool use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
|
||||
fullscreen = initial_fullscreen;
|
||||
renderer_type[i]->server_name = server_name;
|
||||
renderer_type[i]->gst_window = NULL;
|
||||
renderer_type[i]->use_x11 = false;
|
||||
X11_search_attempts = 0;
|
||||
if (use_x11) {
|
||||
if (i == 0) {
|
||||
renderer_type[0]->gst_window = (X11_Window_t *) calloc(1, sizeof(X11_Window_t));
|
||||
g_assert(renderer_type[0]->gst_window);
|
||||
get_X11_Display(renderer_type[0]->gst_window);
|
||||
if (renderer_type[0]->gst_window->display) {
|
||||
renderer_type[i]->use_x11 = true;
|
||||
} else {
|
||||
free(renderer_type[0]->gst_window);
|
||||
renderer_type[0]->gst_window = NULL;
|
||||
} } else if (renderer_type[0]->use_x11) {
|
||||
renderer_type[i]->gst_window = (X11_Window_t *) calloc(1, sizeof(X11_Window_t));
|
||||
g_assert(renderer_type[i]->gst_window);
|
||||
memcpy(renderer_type[i]->gst_window, renderer_type[0]->gst_window, sizeof(X11_Window_t));
|
||||
renderer_type[i]->use_x11 = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_READY);
|
||||
GstState state;
|
||||
if (gst_element_get_state (renderer_type[i]->pipeline, &state, NULL, 0)) {
|
||||
if (state == GST_STATE_READY) {
|
||||
logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer %d", i + 1);
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
|
||||
}
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,23 +277,23 @@ void video_renderer_pause() {
|
||||
}
|
||||
|
||||
void video_renderer_resume() {
|
||||
if (video_renderer_is_paused()) {
|
||||
logger_log(logger, LOGGER_DEBUG, "video renderer resumed");
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
}
|
||||
}
|
||||
|
||||
bool video_renderer_is_paused() {
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
GstState state;
|
||||
gst_element_get_state(renderer->pipeline, &state, NULL, 0);
|
||||
return (state == GST_STATE_PAUSED);
|
||||
/* wait with timeout 100 msec for pipeline to change state from PAUSED to PLAYING */
|
||||
gst_element_get_state(renderer->pipeline, &state, NULL, 100 * GST_MSECOND);
|
||||
const gchar *state_name = gst_element_state_get_name(state);
|
||||
logger_log(logger, LOGGER_DEBUG, "video renderer resumed: state %s", state_name);
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
}
|
||||
|
||||
void video_renderer_start() {
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
renderer->bus = gst_element_get_bus(renderer->pipeline);
|
||||
/* start both h264 and h265 pipelines; will shut down the "wrong" one when we know the codec */
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_PLAYING);
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer_type[i]->appsrc);
|
||||
renderer_type[i]->bus = gst_element_get_bus(renderer_type[i]->pipeline);
|
||||
}
|
||||
renderer = NULL;
|
||||
first_packet = true;
|
||||
#ifdef X_DISPLAY_FIX
|
||||
X11_search_attempts = 0;
|
||||
@@ -281,11 +334,11 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
|
||||
gst_buffer_fill(buffer, 0, data, *data_len);
|
||||
gst_app_src_push_buffer (GST_APP_SRC(renderer->appsrc), buffer);
|
||||
#ifdef X_DISPLAY_FIX
|
||||
if (renderer->gst_window && !(renderer->gst_window->window) && use_x11) {
|
||||
if (renderer->gst_window && !(renderer->gst_window->window) && renderer->use_x11) {
|
||||
X11_search_attempts++;
|
||||
logger_log(logger, LOGGER_DEBUG, "Looking for X11 UxPlay Window, attempt %d", (int) X11_search_attempts);
|
||||
get_x_window(renderer->gst_window, renderer->server_name);
|
||||
if (renderer->gst_window->window) {
|
||||
if (renderer->gst_window->window) {
|
||||
logger_log(logger, LOGGER_INFO, "\n*** X11 Windows: Use key F11 or (left Alt)+Enter to toggle full-screen mode\n");
|
||||
if (fullscreen) {
|
||||
set_fullscreen(renderer->gst_window, &fullscreen);
|
||||
@@ -306,13 +359,13 @@ void video_renderer_stop() {
|
||||
}
|
||||
}
|
||||
|
||||
void video_renderer_destroy() {
|
||||
static void video_renderer_destroy_h26x(video_renderer_t *renderer) {
|
||||
if (renderer) {
|
||||
GstState state;
|
||||
gst_element_get_state(renderer->pipeline, &state, NULL, 0);
|
||||
if (state != GST_STATE_NULL) {
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
}
|
||||
gst_object_unref(renderer->bus);
|
||||
gst_object_unref (renderer->appsrc);
|
||||
@@ -328,11 +381,34 @@ void video_renderer_destroy() {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void video_renderer_destroy() {
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
if (renderer_type[i]) {
|
||||
video_renderer_destroy_h26x(renderer_type[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* not implemented for gstreamer */
|
||||
void video_renderer_update_background(int type) {
|
||||
}
|
||||
|
||||
gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpointer loop) {
|
||||
gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void * loop) {
|
||||
|
||||
/* identify which pipeline sent the message */
|
||||
int type = -1;
|
||||
for (int i = 0 ; i < n_renderers ; i ++ ) {
|
||||
if (renderer_type[i]->bus == bus) {
|
||||
type = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
g_assert(type != -1);
|
||||
|
||||
if (logger_debug) {
|
||||
g_print("GStreamer %s bus message: %s %s\n", renderer_type[type]->codec, GST_MESSAGE_SRC_NAME(message), GST_MESSAGE_TYPE_NAME(message));
|
||||
}
|
||||
switch (GST_MESSAGE_TYPE (message)) {
|
||||
case GST_MESSAGE_ERROR: {
|
||||
GError *err;
|
||||
@@ -352,10 +428,10 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
|
||||
}
|
||||
g_error_free (err);
|
||||
g_free (debug);
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer_type[type]->appsrc));
|
||||
flushing = TRUE;
|
||||
gst_bus_set_flushing(bus, flushing);
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_NULL);
|
||||
g_main_loop_quit( (GMainLoop *) loop);
|
||||
break;
|
||||
}
|
||||
@@ -365,21 +441,34 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
|
||||
// g_main_loop_quit( (GMainLoop *) loop);
|
||||
break;
|
||||
case GST_MESSAGE_STATE_CHANGED:
|
||||
if (auto_videosink) {
|
||||
if (renderer_type[type]->state_pending && strstr(GST_MESSAGE_SRC_NAME(message), "pipeline")) {
|
||||
GstState state;
|
||||
gst_element_get_state(renderer_type[type]->pipeline, &state, NULL,0);
|
||||
if (state == GST_STATE_NULL) {
|
||||
gst_element_set_state(renderer_type[type]->pipeline, GST_STATE_PLAYING);
|
||||
} else if (state == GST_STATE_PLAYING) {
|
||||
renderer_type[type]->state_pending = false;
|
||||
}
|
||||
}
|
||||
if (renderer_type[type]->autovideo) {
|
||||
char *sink = strstr(GST_MESSAGE_SRC_NAME(message), "-actual-sink-");
|
||||
if (sink) {
|
||||
sink += strlen("-actual-sink-");
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer: automatically-selected videosink is \"%ssink\"", sink);
|
||||
auto_videosink = false;
|
||||
if (strstr(GST_MESSAGE_SRC_NAME(message), renderer_type[type]->codec)) {
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer: automatically-selected videosink"
|
||||
" (renderer %d: %s) is \"%ssink\"", renderer_type[type]->id + 1,
|
||||
renderer_type[type]->codec, sink);
|
||||
#ifdef X_DISPLAY_FIX
|
||||
use_x11 = (strstr(sink, "ximage") || strstr(sink, "xvimage"));
|
||||
renderer_type[type]->use_x11 = (strstr(sink, "ximage") || strstr(sink, "xvimage"));
|
||||
#endif
|
||||
renderer_type[type]->autovideo = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
#ifdef X_DISPLAY_FIX
|
||||
case GST_MESSAGE_ELEMENT:
|
||||
if (renderer->gst_window && renderer->gst_window->window) {
|
||||
if (renderer_type[type]->gst_window && renderer_type[type]->gst_window->window) {
|
||||
GstNavigationMessageType message_type = gst_navigation_message_get_type (message);
|
||||
if (message_type == GST_NAVIGATION_MESSAGE_EVENT) {
|
||||
GstEvent *event = NULL;
|
||||
@@ -391,7 +480,7 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
|
||||
if (gst_navigation_event_parse_key_event (event, &key)) {
|
||||
if ((strcmp (key, "F11") == 0) || (alt_keypress && strcmp (key, "Return") == 0)) {
|
||||
fullscreen = !(fullscreen);
|
||||
set_fullscreen(renderer->gst_window, &fullscreen);
|
||||
set_fullscreen(renderer_type[type]->gst_window, &fullscreen);
|
||||
} else if (strcmp (key, "Alt_L") == 0) {
|
||||
alt_keypress = true;
|
||||
}
|
||||
@@ -421,7 +510,42 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
unsigned int video_renderer_listen(void *loop) {
|
||||
return (unsigned int) gst_bus_add_watch(renderer->bus, (GstBusFunc)
|
||||
void video_renderer_choose_codec (bool video_is_h265) {
|
||||
/* set renderer to h264 or h265, depending on pps/sps received by raop_rtp_mirror */
|
||||
video_renderer_t *renderer_new = video_is_h265 ? renderer_type[1] : renderer_type[0];
|
||||
if (renderer == renderer_new) {
|
||||
return;
|
||||
}
|
||||
video_renderer_t *renderer_prev = renderer;
|
||||
renderer = renderer_new;
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
/* it seems unlikely that the codec will change between h264 and h265 during a connection,
|
||||
* but in case it does, we set the previous renderer to GST_STATE_NULL, detect
|
||||
* when this is finished by listening for the bus message, and then reset it to
|
||||
* GST_STATE_READY, so it can be reused if the codec changes again. */
|
||||
if (renderer_prev) {
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer_prev->appsrc));
|
||||
gst_bus_set_flushing(renderer_prev->bus, TRUE);
|
||||
/* set state of previous renderer to GST_STATE_NULL to (hopefully?) close its video window */
|
||||
gst_element_set_state (renderer_prev->pipeline, GST_STATE_NULL);
|
||||
renderer_prev->state_pending = true; // will set state to PLAYING once state is NULL
|
||||
}
|
||||
}
|
||||
|
||||
unsigned int video_reset_callback(void * loop) {
|
||||
if (video_terminate) {
|
||||
video_terminate = false;
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
|
||||
gboolean flushing = TRUE;
|
||||
gst_bus_set_flushing(renderer->bus, flushing);
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
g_main_loop_quit( (GMainLoop *) loop);
|
||||
}
|
||||
return (unsigned int) TRUE;
|
||||
}
|
||||
|
||||
unsigned int video_renderer_listen(void *loop, int id) {
|
||||
g_assert(id >= 0 && id < n_renderers);
|
||||
return (unsigned int) gst_bus_add_watch(renderer_type[id]->bus,(GstBusFunc)
|
||||
gstreamer_pipeline_bus_callback, (gpointer) loop);
|
||||
}
|
||||
@@ -47,23 +47,27 @@ typedef enum videoflip_e {
|
||||
|
||||
typedef struct video_renderer_s video_renderer_t;
|
||||
|
||||
void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const bool *fullscreen,
|
||||
const bool *video_sync);
|
||||
typedef struct user_data_s {
|
||||
int type;
|
||||
GMainLoop *loop;
|
||||
} user_data_t;
|
||||
|
||||
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const char *videosin_options,
|
||||
bool initial_fullscreen, bool video_sync, bool h265_support);
|
||||
void video_renderer_start ();
|
||||
void video_renderer_stop ();
|
||||
void video_renderer_pause ();
|
||||
void video_renderer_resume ();
|
||||
bool video_renderer_is_paused();
|
||||
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);
|
||||
void video_renderer_flush ();
|
||||
unsigned int video_renderer_listen(void *loop);
|
||||
void video_renderer_destroy ();
|
||||
void video_renderer_size(float *width_source, float *height_source, float *width, float *height);
|
||||
|
||||
/* not implemented for gstreamer */
|
||||
void video_renderer_update_background (int type);
|
||||
void video_renderer_choose_codec(bool is_h265);
|
||||
|
||||
unsigned int video_renderer_listen(void *loop, int id);
|
||||
unsigned int video_reset_callback(void *loop);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
11
uxplay.1
11
uxplay.1
@@ -1,11 +1,11 @@
|
||||
.TH UXPLAY "1" "August 2024" "1.69" "User Commands"
|
||||
.TH UXPLAY "1" "September 2024" "1.70" "User Commands"
|
||||
.SH NAME
|
||||
uxplay \- start AirPlay server
|
||||
.SH SYNOPSIS
|
||||
.B uxplay
|
||||
[\fI\,-n name\/\fR] [\fI\,-s wxh\/\fR] [\fI\,-p \/\fR[\fI\,n\/\fR]] [more \fI OPTIONS \/\fR ...]
|
||||
.SH DESCRIPTION
|
||||
UxPlay 1.69: An open\-source AirPlay mirroring (+ audio streaming) server:
|
||||
UxPlay 1.70: An open\-source AirPlay mirroring (+ audio streaming) server:
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B
|
||||
@@ -13,6 +13,8 @@ UxPlay 1.69: An open\-source AirPlay mirroring (+ audio streaming) server:
|
||||
.TP
|
||||
\fB\-nh\fR Do \fBNOT\fR append "@\fIhostname\fR" at end of AirPlay server name
|
||||
.TP
|
||||
\fB\-h265\fR Support h265 (4K) video (with h265 versions of h264 plugins)
|
||||
.TP
|
||||
\fB\-pin\fI[xxxx]\fRUse a 4-digit pin code to control client access (default: no)
|
||||
.IP
|
||||
without option, pin is random: optionally use fixed pin xxxx.
|
||||
@@ -38,7 +40,10 @@ UxPlay 1.69: An open\-source AirPlay mirroring (+ audio streaming) server:
|
||||
.TP
|
||||
\fB\-taper\fR Use a "tapered" AirPlay volume-control profile.
|
||||
.TP
|
||||
\fB\-s\fR wxh[@r]Set display resolution [refresh_rate] default 1920x1080[@60]
|
||||
\fB\-s\fR wxh[@r]Request to client for video display resolution [refresh_rate]
|
||||
.IP
|
||||
default 1920x1080[@60] (or 3840x2160[@60] with -h265 option).
|
||||
.PP
|
||||
.TP
|
||||
\fB\-o\fR Set display "overscanned" mode on (not usually needed)
|
||||
.TP
|
||||
|
||||
103
uxplay.cpp
103
uxplay.cpp
@@ -62,7 +62,7 @@
|
||||
#include "renderers/video_renderer.h"
|
||||
#include "renderers/audio_renderer.h"
|
||||
|
||||
#define VERSION "1.69"
|
||||
#define VERSION "1.70"
|
||||
|
||||
#define SECOND_IN_USECS 1000000
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
@@ -85,6 +85,7 @@ static bool relaunch_video = false;
|
||||
static bool reset_loop = false;
|
||||
static unsigned int open_connections= 0;
|
||||
static std::string videosink = "autovideosink";
|
||||
static std::string videosink_options = "";
|
||||
static videoflip_t videoflip[2] = { NONE , NONE };
|
||||
static bool use_video = true;
|
||||
static unsigned char compression_type = 0;
|
||||
@@ -141,6 +142,8 @@ static std::vector <std::string> registered_keys;
|
||||
static double db_low = -30.0;
|
||||
static double db_high = 0.0;
|
||||
static bool taper_volume = false;
|
||||
static bool h265_support = false;
|
||||
static int n_renderers = 0;
|
||||
|
||||
/* logging */
|
||||
|
||||
@@ -357,13 +360,13 @@ static gboolean reset_callback(gpointer loop) {
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static gboolean sigint_callback(gpointer loop) {
|
||||
static gboolean sigint_callback(gpointer loop) {
|
||||
relaunch_video = false;
|
||||
g_main_loop_quit((GMainLoop *) loop);
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static gboolean sigterm_callback(gpointer loop) {
|
||||
static gboolean sigterm_callback(gpointer loop) {
|
||||
relaunch_video = false;
|
||||
g_main_loop_quit((GMainLoop *) loop);
|
||||
return TRUE;
|
||||
@@ -391,22 +394,30 @@ static guint g_unix_signal_add(gint signum, GSourceFunc handler, gpointer user_d
|
||||
#endif
|
||||
|
||||
static void main_loop() {
|
||||
guint gst_bus_watch_id = 0;
|
||||
guint gst_bus_watch_id[2] = { 0 };
|
||||
g_assert(n_renderers <= 2);
|
||||
GMainLoop *loop = g_main_loop_new(NULL,FALSE);
|
||||
relaunch_video = false;
|
||||
if (use_video) {
|
||||
relaunch_video = true;
|
||||
gst_bus_watch_id = (guint) video_renderer_listen((void *)loop);
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
gst_bus_watch_id[i] = (guint) video_renderer_listen((void *)loop, i);
|
||||
}
|
||||
}
|
||||
guint reset_watch_id = g_timeout_add(100, (GSourceFunc) reset_callback, (gpointer) loop);
|
||||
guint video_reset_watch_id = g_timeout_add(100, (GSourceFunc) video_reset_callback, (gpointer) loop);
|
||||
guint sigterm_watch_id = g_unix_signal_add(SIGTERM, (GSourceFunc) sigterm_callback, (gpointer) loop);
|
||||
guint sigint_watch_id = g_unix_signal_add(SIGINT, (GSourceFunc) sigint_callback, (gpointer) loop);
|
||||
//printf("********** main_loop_run *******************\n");
|
||||
g_main_loop_run(loop);
|
||||
|
||||
if (gst_bus_watch_id > 0) g_source_remove(gst_bus_watch_id);
|
||||
//printf("********** main_loop_exit *******************\n");
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
if (gst_bus_watch_id[i] > 0) g_source_remove(gst_bus_watch_id[i]);
|
||||
}
|
||||
if (sigint_watch_id > 0) g_source_remove(sigint_watch_id);
|
||||
if (sigterm_watch_id > 0) g_source_remove(sigterm_watch_id);
|
||||
if (reset_watch_id > 0) g_source_remove(reset_watch_id);
|
||||
if (video_reset_watch_id > 0) g_source_remove(video_reset_watch_id);
|
||||
g_main_loop_unref(loop);
|
||||
}
|
||||
|
||||
@@ -570,6 +581,7 @@ static void print_info (char *name) {
|
||||
printf("Options:\n");
|
||||
printf("-n name Specify the network name of the AirPlay server\n");
|
||||
printf("-nh Do not add \"@hostname\" at the end of AirPlay server name\n");
|
||||
printf("-h265 Support h265 (4K) video (with h265 versions of h264 plugins)\n");
|
||||
printf("-pin[xxxx]Use a 4-digit pin code to control client access (default: no)\n");
|
||||
printf(" default pin is random: optionally use fixed pin xxxx\n");
|
||||
printf("-reg [fn] Keep a register in $HOME/.uxplay.register to verify returning\n");
|
||||
@@ -582,7 +594,8 @@ static void print_info (char *name) {
|
||||
printf("-db l[:h] Set minimum volume attenuation to l dB (decibels, negative);\n");
|
||||
printf(" optional: set maximum to h dB (+ or -) default: -30.0:0.0 dB\n");
|
||||
printf("-taper Use a \"tapered\" AirPlay volume-control profile\n");
|
||||
printf("-s wxh[@r]Set display resolution [refresh_rate] default 1920x1080[@60]\n");
|
||||
printf("-s wxh[@r]Request to client for video display resolution [refresh_rate]\n");
|
||||
printf(" default 1920x1080[@60] (or 3840x2160[@60] with -h265 option)\n");
|
||||
printf("-o Set display \"overscanned\" mode on (not usually needed)\n");
|
||||
printf("-fs Full-screen (only works with X11, Wayland, VAAPI, D3D11)\n");
|
||||
printf("-p Use legacy ports UDP 6000:6001:7011 TCP 7000:7001:7100\n");
|
||||
@@ -594,7 +607,6 @@ static void print_info (char *name) {
|
||||
printf("-vd ... Choose the GStreamer h264 decoder; default \"decodebin\"\n");
|
||||
printf(" choices: (software) avdec_h264; (hardware) v4l2h264dec,\n");
|
||||
printf(" nvdec, nvh264dec, vaapih64dec, vtdec,etc.\n");
|
||||
printf(" choices: avdec_h264,vaapih264dec,nvdec,nvh264dec,v4l2h264dec\n");
|
||||
printf("-vc ... Choose the GStreamer videoconverter; default \"videoconvert\"\n");
|
||||
printf(" another choice when using v4l2h264dec: v4l2convert\n");
|
||||
printf("-vs ... Choose the GStreamer videosink; default \"autovideosink\"\n");
|
||||
@@ -930,6 +942,12 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
|
||||
videosink.erase();
|
||||
videosink.append(argv[++i]);
|
||||
std::size_t pos = videosink.find(" ");
|
||||
if (pos != std::string::npos) {
|
||||
videosink_options.erase();
|
||||
videosink_options = videosink.substr(pos);
|
||||
videosink.erase(pos);
|
||||
}
|
||||
} else if (arg == "-as") {
|
||||
if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
|
||||
audiosink.erase();
|
||||
@@ -1127,6 +1145,8 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
db_low = db1;
|
||||
db_high = db2;
|
||||
printf("db range %f:%f\n", db_low, db_high);
|
||||
} else if (arg == "-h265") {
|
||||
h265_support = true;
|
||||
} else if (arg == "-nofreeze") {
|
||||
nofreeze = true;
|
||||
} else {
|
||||
@@ -1300,7 +1320,7 @@ static int register_dnssd() {
|
||||
return -4;
|
||||
}
|
||||
|
||||
LOGD("register_dnssd: advertised AirPlay service with \"Features\" code = 0x%X",
|
||||
LOGD("register_dnssd: advertised AirPlay service with \"Features\" code = 0x%llX",
|
||||
dnssd_get_airplay_features(dnssd));
|
||||
return 0;
|
||||
}
|
||||
@@ -1379,11 +1399,7 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
|
||||
dnssd_set_airplay_features(dnssd, 30, 1); // RAOP support: with this bit set, the AirTunes service is not required.
|
||||
dnssd_set_airplay_features(dnssd, 31, 0); //
|
||||
|
||||
for (int i = 32; i < 64; i++) {
|
||||
dnssd_set_airplay_features(dnssd, i, 0);
|
||||
}
|
||||
|
||||
/* bits 32-63 are not used here: see https://emanualcozzi.net/docs/airplay2/features
|
||||
/* bits 32-63 see https://emanualcozzi.net/docs/airplay2/features
|
||||
dnssd_set_airplay_features(dnssd, 32, 0); // isCarPlay when ON,; Supports InitialVolume when OFF
|
||||
dnssd_set_airplay_features(dnssd, 33, 0); // Supports Air Play Video Play Queue
|
||||
dnssd_set_airplay_features(dnssd, 34, 0); // Supports Air Play from cloud (requires that bit 6 is ON)
|
||||
@@ -1396,7 +1412,8 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
|
||||
|
||||
dnssd_set_airplay_features(dnssd, 40, 0); // Supports Buffered Audio
|
||||
dnssd_set_airplay_features(dnssd, 41, 0); // Supports PTP
|
||||
dnssd_set_airplay_features(dnssd, 42, 0); // Supports Screen Multi Codec
|
||||
|
||||
dnssd_set_airplay_features(dnssd, 42, 0); // Supports Screen Multi Codec (allows h265 video)
|
||||
dnssd_set_airplay_features(dnssd, 43, 0); // Supports System Pairing
|
||||
|
||||
dnssd_set_airplay_features(dnssd, 44, 0); // is AP Valeria Screen Sender
|
||||
@@ -1423,6 +1440,9 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
|
||||
dnssd_set_airplay_features(dnssd, 61, 0); // Supports RFC2198 redundancy
|
||||
*/
|
||||
|
||||
/* needed for h265 video support */
|
||||
dnssd_set_airplay_features(dnssd, 42, (int) h265_support);
|
||||
|
||||
/* bit 27 of Features determines whether the AirPlay2 client-pairing protocol will be used (1) or not (0) */
|
||||
dnssd_set_airplay_features(dnssd, 27, (int) setup_legacy_pairing);
|
||||
return 0;
|
||||
@@ -1460,7 +1480,12 @@ extern "C" void video_reset(void *cls) {
|
||||
relaunch_video = true;
|
||||
}
|
||||
|
||||
|
||||
extern "C" void video_set_codec(void *cls, video_codec_t codec) {
|
||||
if (use_video) {
|
||||
bool video_is_h265 = (codec == VIDEO_CODEC_H265);
|
||||
video_renderer_choose_codec(video_is_h265);
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" void display_pin(void *cls, char *pin) {
|
||||
int margin = 10;
|
||||
@@ -1572,7 +1597,7 @@ extern "C" void audio_process (void *cls, raop_ntp_t *ntp, audio_decode_struct *
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" void video_process (void *cls, raop_ntp_t *ntp, h264_decode_struct *data) {
|
||||
extern "C" void video_process (void *cls, raop_ntp_t *ntp, video_decode_struct *data) {
|
||||
if (dump_video) {
|
||||
dump_video_to_file(data->data, data->data_len);
|
||||
}
|
||||
@@ -1586,18 +1611,12 @@ extern "C" void video_process (void *cls, raop_ntp_t *ntp, h264_decode_struct *d
|
||||
}
|
||||
|
||||
extern "C" void video_pause (void *cls) {
|
||||
#ifdef GST_124
|
||||
return; //pause/resume changes in GStreamer-1.24 break this code
|
||||
#endif
|
||||
if (use_video) {
|
||||
video_renderer_pause();
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" void video_resume (void *cls) {
|
||||
#ifdef GST_124
|
||||
return; //pause/resume changes in GStreamer-1.24 break this code
|
||||
#endif
|
||||
if (use_video) {
|
||||
video_renderer_resume();
|
||||
}
|
||||
@@ -1831,6 +1850,7 @@ static int start_raop_server (unsigned short display[5], unsigned short tcp[3],
|
||||
raop_cbs.check_register = check_register;
|
||||
raop_cbs.export_dacp = export_dacp;
|
||||
raop_cbs.video_reset = video_reset;
|
||||
raop_cbs.video_set_codec = video_set_codec;
|
||||
|
||||
raop = raop_init(&raop_cbs);
|
||||
if (raop == NULL) {
|
||||
@@ -2037,21 +2057,22 @@ int main (int argc, char *argv[]) {
|
||||
use_video = false;
|
||||
videosink.erase();
|
||||
videosink.append("fakesink");
|
||||
videosink_options.erase();
|
||||
LOGI("video_disabled");
|
||||
display[3] = 1; /* set fps to 1 frame per sec when no video will be shown */
|
||||
}
|
||||
|
||||
if (fullscreen && use_video) {
|
||||
if (videosink == "waylandsink" || videosink == "vaapisink") {
|
||||
videosink.append(" fullscreen=true");
|
||||
videosink_options.append(" fullscreen=true");
|
||||
}
|
||||
}
|
||||
|
||||
if (videosink == "d3d11videosink" && use_video) {
|
||||
if (videosink == "d3d11videosink" && videosink_options.empty() && use_video) {
|
||||
if (fullscreen) {
|
||||
videosink.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_PROPERTY fullscreen=true ");
|
||||
videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_PROPERTY fullscreen=true ");
|
||||
} else {
|
||||
videosink.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_ALT_ENTER ");
|
||||
videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_ALT_ENTER ");
|
||||
}
|
||||
LOGI("d3d11videosink is being used with option fullscreen-toggle-mode=alt-enter\n"
|
||||
"Use Alt-Enter key combination to toggle into/out of full-screen mode");
|
||||
@@ -2129,8 +2150,10 @@ int main (int argc, char *argv[]) {
|
||||
}
|
||||
|
||||
if (use_video) {
|
||||
n_renderers = h265_support ? 2 : 1;
|
||||
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen, &video_sync);
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
|
||||
videosink_options.c_str(),fullscreen, video_sync, h265_support);
|
||||
video_renderer_start();
|
||||
}
|
||||
|
||||
@@ -2158,10 +2181,22 @@ int main (int argc, char *argv[]) {
|
||||
write_coverart(coverart_filename.c_str(), (const void *) empty_image, sizeof(empty_image));
|
||||
}
|
||||
|
||||
restart:
|
||||
/* set default resolutions for h264 or h265*/
|
||||
if (!display[0] && !display[1]) {
|
||||
if (h265_support) {
|
||||
display[0] = 3840;
|
||||
display[1] = 2160;
|
||||
} else {
|
||||
display[0] = 1920;
|
||||
display[1] = 1080;
|
||||
}
|
||||
}
|
||||
|
||||
restart:
|
||||
if (start_dnssd(server_hw_addr, server_name)) {
|
||||
goto cleanup;
|
||||
}
|
||||
|
||||
if (start_raop_server(display, tcp, udp, debug_log)) {
|
||||
stop_dnssd();
|
||||
goto cleanup;
|
||||
@@ -2173,7 +2208,8 @@ int main (int argc, char *argv[]) {
|
||||
}
|
||||
reconnect:
|
||||
compression_type = 0;
|
||||
close_window = new_window_closing_behavior;
|
||||
close_window = new_window_closing_behavior;
|
||||
|
||||
main_loop();
|
||||
if (relaunch_video || reset_loop) {
|
||||
if(reset_loop) {
|
||||
@@ -2184,9 +2220,10 @@ int main (int argc, char *argv[]) {
|
||||
if (use_audio) audio_renderer_stop();
|
||||
if (use_video && close_window) {
|
||||
video_renderer_destroy();
|
||||
raop_remove_known_connections(raop);
|
||||
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen,
|
||||
&video_sync);
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
|
||||
videosink_options.c_str(), fullscreen, video_sync, h265_support);
|
||||
video_renderer_start();
|
||||
}
|
||||
if (relaunch_video) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Name: uxplay
|
||||
Version: 1.69
|
||||
Version: 1.70
|
||||
Release: 1%{?dist}
|
||||
|
||||
%global gittag v%{version}
|
||||
@@ -135,7 +135,7 @@ cd build
|
||||
%{_docdir}/%{name}/llhttp/LICENSE-MIT
|
||||
|
||||
%changelog
|
||||
* Fri Aug 09 2024 UxPlay maintainer <https://github.com/FDH2/UxPlay>
|
||||
* Tue Sep 17 2024 UxPlay maintainer <https://github.com/FDH2/UxPlay>
|
||||
Initial uxplay.spec: tested on Fedora 38, Rocky Linux 9.2, OpenSUSE
|
||||
Leap 15.5, Mageia 9, OpenMandriva ROME, PCLinuxOS
|
||||
-
|
||||
|
||||
Reference in New Issue
Block a user