Index: webkitgtk-2.52.0/Source/cmake/GStreamerChecks.cmake
===================================================================
--- webkitgtk-2.52.0.orig/Source/cmake/GStreamerChecks.cmake
+++ webkitgtk-2.52.0/Source/cmake/GStreamerChecks.cmake
@@ -1,7 +1,7 @@
 if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO OR ENABLE_WEB_CODECS)
     SET_AND_EXPOSE_TO_BUILD(USE_GSTREAMER TRUE)
       if (USE_GSTREAMER_FULL)
-          find_package(GStreamer 1.18.4 REQUIRED COMPONENTS full)
+          find_package(GStreamer 1.16.1 REQUIRED COMPONENTS full)
           if (NOT PC_GSTREAMER_FULL_FOUND)
               message(FATAL_ERROR "GStreamer static library libgstreamer-full-1.0 not found")
           else ()
@@ -25,7 +25,7 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO OR
               list(APPEND GSTREAMER_COMPONENTS webrtc)
           endif ()
 
-          find_package(GStreamer 1.18.4 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
+          find_package(GStreamer 1.16.1 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
 
           if (ENABLE_WEB_AUDIO)
               if (NOT PC_GSTREAMER_AUDIO_FOUND OR NOT PC_GSTREAMER_FFT_FOUND)
Index: webkitgtk-2.52.0/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
@@ -141,7 +141,8 @@ AudioSourceProviderGStreamer::AudioSourc
     g_signal_connect_swapped(decodebin, "pad-added", G_CALLBACK(+[](AudioSourceProviderGStreamer* provider, GstPad* pad) {
         auto padCaps = adoptGRef(gst_pad_query_caps(pad, nullptr));
         bool isAudio = doCapsHaveType(padCaps.get(), "audio"_s);
-        RELEASE_ASSERT(isAudio);
+        if (!isAudio)
+            return;
 
         auto sinkPad = adoptGRef(gst_element_get_static_pad(provider->m_audioSinkBin.get(), "sink"));
         gst_pad_link(pad, sinkPad.get());
Index: webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
@@ -74,16 +74,7 @@ static bool s_isDMABufDisabled;
 
 static void initializeDMABufAvailability()
 {
-    static std::once_flag onceFlag;
-    std::call_once(onceFlag, [] {
-        if (!gst_check_version(1, 20, 0))
-            return;
-
-        auto value = CStringView::unsafeFromUTF8(g_getenv("WEBKIT_GST_DMABUF_SINK_DISABLED"));
-        s_isDMABufDisabled = !value.isEmpty() && (equalLettersIgnoringASCIICase(value.span(), "true"_s) || equalLettersIgnoringASCIICase(value.span(), "1"_s));
-        if (!s_isDMABufDisabled && !DRMDeviceManager::singleton().mainGBMDevice(DRMDeviceManager::NodeType::Render))
-            s_isDMABufDisabled = true;
-    });
+    s_isDMABufDisabled = true;
 }
 #endif
 
@@ -126,7 +117,19 @@ static void webKitGLVideoSinkConstructed
     if (!s_isDMABufDisabled)
         gst_caps_append(caps.get(), buildDMABufCaps().leakRef());
 #endif
-    GRefPtr<GstCaps> glCaps = adoptGRef(gst_caps_from_string("video/x-raw, format = (string) " GST_GL_CAPS_FORMAT));
+    // Workaround until we can depend on GStreamer 1.16.2.
+    // https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/commit/8d32de090554cf29fe359f83aa46000ba658a693
+    // Forcing a color conversion to RGBA here allows glupload to internally use
+    // an uploader that adds a VideoMeta, through the TextureUploadMeta caps
+    // feature, without needing the patch above. However this specific caps
+    // feature is going to be removed from GStreamer so it is considered a
+    // short-term workaround. This code path most likely will have a negative
+    // performance impact on embedded platforms as well. Downstream embedders
+    // are highly encouraged to cherry-pick the patch linked above in their BSP
+    // and set the WEBKIT_GST_NO_RGBA_CONVERSION environment variable until
+    // GStreamer 1.16.2 is released.
+    // See also https://bugs.webkit.org/show_bug.cgi?id=201422
+    GRefPtr<GstCaps> glCaps = adoptGRef(gst_caps_from_string("video/x-raw, format = (string) RGBA"));
     gst_caps_set_features(glCaps.get(), 0, gst_caps_features_new(GST_CAPS_FEATURE_MEMORY_GL_MEMORY, nullptr));
     gst_caps_append(caps.get(), glCaps.leakRef());
 
@@ -170,17 +173,11 @@ static GstStateChangeReturn webKitGLVide
     return GST_ELEMENT_CLASS(webkit_gl_video_sink_parent_class)->change_state(element, transition);
 }
 
-static void webKitGLVideoSinkGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* paramSpec)
+static void webKitGLVideoSinkGetProperty(GObject* object, guint propertyId, GValue*, GParamSpec* paramSpec)
 {
-    WebKitGLVideoSink* sink = WEBKIT_GL_VIDEO_SINK(object);
-
     switch (propertyId) {
-    case WEBKIT_GL_VIDEO_SINK_PROP_STATS: {
-        GUniqueOutPtr<GstStructure> stats;
-        g_object_get(sink->priv->appSink.get(), "stats", &stats.outPtr(), nullptr);
-        gst_value_set_structure(value, stats.get());
+    case WEBKIT_GL_VIDEO_SINK_PROP_STATS:
         break;
-    }
     default:
         G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, paramSpec);
         RELEASE_ASSERT_NOT_REACHED();
Index: webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
@@ -32,7 +32,7 @@ GST_DEBUG_CATEGORY_STATIC(webkit_media_g
 
 bool GStreamerAudioMixer::isAvailable()
 {
-    return isGStreamerPluginAvailable("inter"_s) && isGStreamerPluginAvailable("audiomixer"_s);
+    return false;
 }
 
 GStreamerAudioMixer& GStreamerAudioMixer::singleton()
Index: webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
@@ -137,6 +137,25 @@ static GstClockTime s_webkitGstInitTime;
 }
 
 #if ENABLE(VIDEO)
+
+#if !GST_CHECK_VERSION(1, 18, 0)
+void webkitGstVideoFormatInfoComponent(const GstVideoFormatInfo* info, guint plane, gint components[GST_VIDEO_MAX_COMPONENTS])
+{
+    guint c, i = 0;
+
+    /* Reverse mapping of info->plane. */
+    for (c = 0; c < GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info); c++) {
+        if (GST_VIDEO_FORMAT_INFO_PLANE(info, c) == plane) {
+            components[i] = c;
+            i++;
+        }
+    }
+
+    for (c = i; c < GST_VIDEO_MAX_COMPONENTS; c++)
+        components[c] = -1;
+}
+#endif
+
 bool getVideoSizeAndFormatFromCaps(const GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride, double& frameRate, PlatformVideoColorSpace& colorSpace)
 {
     if (!doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
@@ -725,31 +744,6 @@ void deinitializeGStreamer()
     teardownVideoEncoderSingleton();
     teardownGStreamerImageDecoders();
 #endif
-
-    bool isLeaksTracerActive = false;
-    auto activeTracers = gst_tracing_get_active_tracers();
-    while (activeTracers) {
-        auto tracer = adoptGRef(GST_TRACER_CAST(activeTracers->data));
-        if (!isLeaksTracerActive && equal(unsafeSpan(G_OBJECT_TYPE_NAME(G_OBJECT(tracer.get()))), "GstLeaksTracer"_s))
-            isLeaksTracerActive = true;
-        activeTracers = g_list_delete_link(activeTracers, activeTracers);
-    }
-
-    if (!isLeaksTracerActive)
-        return;
-
-    // Make sure there is no active pipeline left. Those might trigger deadlocks during gst_deinit().
-    {
-        Locker locker { s_activePipelinesMapLock };
-        for (auto& pipeline : activePipelinesMap().values()) {
-            GST_DEBUG("Pipeline %" GST_PTR_FORMAT " was left running. Forcing clean-up.", pipeline.get());
-            disconnectSimpleBusMessageCallback(pipeline.get());
-            gst_element_set_state(pipeline.get(), GST_STATE_NULL);
-        }
-        activePipelinesMap().clear();
-    }
-
-    gst_deinit();
 }
 
 unsigned getGstPlayFlag(ASCIILiteral nick)
@@ -825,8 +819,11 @@ GstMappedFrame::GstMappedFrame(GstMapped
 {
     std::swap(m_frame, other.m_frame);
     other.m_frame.buffer = nullptr;
-    std::swap(m_alignment, other.m_alignment);
-    std::swap(m_planeSizes, other.m_planeSizes);
+}
+
+GstMappedFrame::GstMappedFrame(GstBuffer* buffer, const GstVideoInfo* info, GstMapFlags flags)
+{
+    gst_video_frame_map(&m_frame, const_cast<GstVideoInfo*>(info), buffer, flags);
 }
 
 GstMappedFrame::GstMappedFrame(const GRefPtr<GstSample>& sample, GstMapFlags flags)
@@ -835,11 +832,7 @@ GstMappedFrame::GstMappedFrame(const GRe
     if (!gst_video_info_from_caps(&info, gst_sample_get_caps(sample.get())))
         return;
 
-    if (!gst_video_frame_map(&m_frame, &info, gst_sample_get_buffer(sample.get()), flags))
-        return;
-
-    gst_video_alignment_reset(&m_alignment);
-    gst_video_info_align_full(&info, &m_alignment, m_planeSizes.data());
+    gst_video_frame_map(&m_frame, &info, gst_sample_get_buffer(sample.get()), flags);
 }
 
 GstMappedFrame::~GstMappedFrame()
@@ -908,7 +901,7 @@ std::span<uint8_t> GstMappedFrame::plane
     WTF_ALLOW_UNSAFE_BUFFER_USAGE_BEGIN; // GLib port
     auto data = reinterpret_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, planeIndex));
     WTF_ALLOW_UNSAFE_BUFFER_USAGE_END;
-    return unsafeMakeSpan(data, planeHeight(planeIndex) * planeStride(planeIndex));
+    return unsafeMakeSpan(data, height() * planeStride(planeIndex));
 }
 
 WTF_ALLOW_UNSAFE_BUFFER_USAGE_BEGIN; // GLib port
@@ -918,12 +911,6 @@ int GstMappedFrame::planeStride(uint32_t
     return GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, planeIndex);
 }
 
-size_t GstMappedFrame::planeHeight(uint32_t planeIndex) const
-{
-    RELEASE_ASSERT(isValid());
-    return GST_VIDEO_INFO_PLANE_HEIGHT(&m_frame.info, planeIndex, m_planeSizes.data());
-}
-
 #if USE(GSTREAMER_GL)
 GLuint GstMappedFrame::textureID(int planeIndex) const
 {
@@ -1611,6 +1598,36 @@ String gstStructureToJSONString(const Gs
     return value->toJSONString();
 }
 
+#if !GST_CHECK_VERSION(1, 18, 0)
+GstClockTime webkitGstElementGetCurrentRunningTime(GstElement* element)
+{
+    g_return_val_if_fail(GST_IS_ELEMENT(element), GST_CLOCK_TIME_NONE);
+
+    auto baseTime = gst_element_get_base_time(element);
+    if (!GST_CLOCK_TIME_IS_VALID(baseTime)) {
+        GST_DEBUG_OBJECT(element, "Could not determine base time");
+        return GST_CLOCK_TIME_NONE;
+    }
+
+    auto clock = adoptGRef(gst_element_get_clock(element));
+    if (!clock) {
+        GST_DEBUG_OBJECT(element, "Element has no clock");
+        return GST_CLOCK_TIME_NONE;
+    }
+
+    auto clockTime = gst_clock_get_time(clock.get());
+    if (!GST_CLOCK_TIME_IS_VALID(clockTime))
+        return GST_CLOCK_TIME_NONE;
+
+    if (clockTime < baseTime) {
+        GST_DEBUG_OBJECT(element, "Got negative current running time");
+        return GST_CLOCK_TIME_NONE;
+    }
+
+    return clockTime - baseTime;
+}
+#endif
+
 GstClockTime webkitGstInitTime()
 {
     return s_webkitGstInitTime;
@@ -1668,6 +1685,7 @@ PlatformVideoColorSpace videoColorSpaceF
     case GST_VIDEO_TRANSFER_BT709:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt709;
         break;
+#if GST_CHECK_VERSION(1, 18, 0)
     case GST_VIDEO_TRANSFER_BT601:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Smpte170m;
         break;
@@ -1680,6 +1698,7 @@ PlatformVideoColorSpace videoColorSpaceF
     case GST_VIDEO_TRANSFER_BT2020_10:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt2020_10bit;
         break;
+#endif
     case GST_VIDEO_TRANSFER_BT2020_12:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt2020_12bit;
         break;
@@ -1798,6 +1817,7 @@ void fillVideoInfoColorimetryFromColorSp
         case PlatformVideoTransferCharacteristics::Bt709:
             GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT709;
             break;
+#if GST_CHECK_VERSION(1, 18, 0)
         case PlatformVideoTransferCharacteristics::Smpte170m:
             GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT601;
             break;
@@ -1810,6 +1830,7 @@ void fillVideoInfoColorimetryFromColorSp
         case PlatformVideoTransferCharacteristics::Bt2020_10bit:
             GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
             break;
+#endif
         case PlatformVideoTransferCharacteristics::Bt2020_12bit:
             GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
             break;
Index: webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
+++ webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
@@ -78,6 +78,14 @@ inline bool gst_check_version(guint majo
 }
 #endif
 
+#if !GST_CHECK_VERSION(1, 18, 0)
+// gst_video_format_info_component() is GStreamer 1.18 API, so for older versions we use a local
+// vendored copy of the function.
+#define GST_VIDEO_MAX_COMPONENTS 4
+void webkitGstVideoFormatInfoComponent(const GstVideoFormatInfo*, guint, gint components[GST_VIDEO_MAX_COMPONENTS]);
+#define gst_video_format_info_component webkitGstVideoFormatInfoComponent
+#endif
+
 #define GST_VIDEO_CAPS_TYPE_PREFIX  "video/"_s
 #define GST_AUDIO_CAPS_TYPE_PREFIX  "audio/"_s
 #define GST_TEXT_CAPS_TYPE_PREFIX   "text/"_s
@@ -224,6 +232,7 @@ class GstMappedFrame {
 
 public:
     GstMappedFrame(GstMappedFrame&&);
+    GstMappedFrame(GstBuffer*, const GstVideoInfo*, GstMapFlags);
     GstMappedFrame(const GRefPtr<GstSample>&, GstMapFlags);
 
     ~GstMappedFrame();
@@ -242,7 +251,6 @@ public:
     int format() const;
     std::span<uint8_t> planeData(uint32_t) const;
     int planeStride(uint32_t) const;
-    size_t planeHeight(uint32_t) const;
 
     bool isValid() const { return m_frame.buffer; }
     explicit operator bool() const { return m_frame.buffer; }
@@ -257,8 +265,6 @@ public:
 
 private:
     GstVideoFrame m_frame;
-    GstVideoAlignment m_alignment;
-    std::array<size_t, GST_VIDEO_MAX_PLANES> m_planeSizes { };
 };
 
 class GstMappedAudioBuffer {
@@ -317,6 +323,13 @@ Vector<T> gstStructureGetList(const GstS
 
 String gstStructureToJSONString(const GstStructure*);
 
+#if !GST_CHECK_VERSION(1, 18, 0)
+// gst_element_get_current_running_time() is GStreamer 1.18 API, so for older versions we use a local
+// vendored copy of the function.
+GstClockTime webkitGstElementGetCurrentRunningTime(GstElement*);
+#define gst_element_get_current_running_time webkitGstElementGetCurrentRunningTime
+#endif
+
 GstClockTime webkitGstInitTime();
 
 PlatformVideoColorSpace videoColorSpaceFromCaps(const GstCaps*);
Index: webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
@@ -628,8 +628,6 @@ bool MediaPlayerPrivateGStreamer::doSeek
     auto seekStop = toGstClockTime(endTime);
     auto event = adoptGRef(gst_event_new_seek(rate, GST_FORMAT_TIME, seekFlags, GST_SEEK_TYPE_SET, seekStart, GST_SEEK_TYPE_SET, seekStop));
 
-    GST_DEBUG_OBJECT(pipeline(), "[Seek] Performing actual seek to %" GST_TIMEP_FORMAT " (endTime: %" GST_TIMEP_FORMAT ") at rate %f", &seekStart, &seekStop, rate);
-
     if (isAsync) {
         auto data = createAsyncSeekData();
         data->event = WTF::move(event);
@@ -4424,33 +4422,7 @@ void MediaPlayerPrivateGStreamer::setStr
 
 bool MediaPlayerPrivateGStreamer::updateVideoSinkStatistics()
 {
-    if (!m_videoSink)
-        return false;
-
-    GUniqueOutPtr<GstStructure> stats;
-    g_object_get(m_videoSink.get(), "stats", &stats.outPtr(), nullptr);
-    if (!stats)
-        return false;
-
-    auto totalVideoFrames = gstStructureGet<uint64_t>(stats.get(), "rendered"_s);
-    auto droppedVideoFrames = gstStructureGet<uint64_t>(stats.get(), "dropped"_s);
-    auto averageRate = gstStructureGet<double>(stats.get(), "average-rate"_s);
-
-    if (!totalVideoFrames || !droppedVideoFrames || !averageRate)
-        return false;
-
-    // Caching is required so that metrics queries performed after EOS still return valid values.
-    if (*totalVideoFrames)
-        m_totalVideoFrames = *totalVideoFrames;
-    if (*droppedVideoFrames)
-        m_droppedVideoFrames = *droppedVideoFrames;
-
-    if (*averageRate && m_videoInfo) {
-        double frameRate;
-        gst_util_fraction_to_double(GST_VIDEO_INFO_FPS_N(&m_videoInfo->info), GST_VIDEO_INFO_FPS_D(&m_videoInfo->info), &frameRate);
-        m_averageFrameRate = *averageRate * frameRate;
-    }
-    return true;
+    return false;
 }
 
 std::optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateGStreamer::videoPlaybackQualityMetrics()
Index: webkitgtk-2.52.0/Source/WebCore/platform/graphics/texmap/coordinated/CoordinatedPlatformLayerBufferVideo.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/graphics/texmap/coordinated/CoordinatedPlatformLayerBufferVideo.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/graphics/texmap/coordinated/CoordinatedPlatformLayerBufferVideo.cpp
@@ -102,9 +102,7 @@ std::unique_ptr<CoordinatedPlatformLayer
     // When not having a texture, we map the frame here and upload the pixels to a texture in the
     // compositor thread, in paintToTextureMapper(), which also allows us to use the texture mapper
     // bitmap texture pool.
-    auto caps = adoptGRef(gst_video_info_to_caps(videoInfo));
-    auto sample = adoptGRef(gst_sample_new(buffer, caps.get(), nullptr, nullptr));
-    m_videoFrame.emplace(GstMappedFrame(sample, GST_MAP_READ));
+    m_videoFrame.emplace(GstMappedFrame(buffer, videoInfo, GST_MAP_READ));
     if (!*m_videoFrame) {
         // If mapping failed, clear the GstMappedFrame holder.
         m_videoFrame = std::nullopt;
@@ -189,10 +187,7 @@ std::unique_ptr<CoordinatedPlatformLayer
             colorSpace = DMABufBuffer::ColorSpace::Bt709;
         else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(videoInfo), GST_VIDEO_COLORIMETRY_BT2020))
             colorSpace = DMABufBuffer::ColorSpace::Bt2020;
-        else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(videoInfo), GST_VIDEO_COLORIMETRY_BT2100_PQ)) {
-            colorSpace = DMABufBuffer::ColorSpace::Bt2020;
-            transferFunction = DMABufBuffer::TransferFunction::Pq;
-        } else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(videoInfo), GST_VIDEO_COLORIMETRY_SMPTE240M))
+        else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(videoInfo), GST_VIDEO_COLORIMETRY_SMPTE240M))
             colorSpace = DMABufBuffer::ColorSpace::Smpte240M;
         dmabuf->setColorSpace(colorSpace);
         dmabuf->setTransferFunction(transferFunction);
@@ -209,9 +204,9 @@ std::unique_ptr<CoordinatedPlatformLayer
 #if USE(GSTREAMER_GL)
 std::unique_ptr<CoordinatedPlatformLayerBuffer> CoordinatedPlatformLayerBufferVideo::createBufferFromGLMemory(GstBuffer* buffer, const GstVideoInfo* videoInfo)
 {
+    m_videoFrame.emplace(GstMappedFrame(buffer, videoInfo, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)));
     auto caps = adoptGRef(gst_video_info_to_caps(videoInfo));
     auto sample = adoptGRef(gst_sample_new(buffer, caps.get(), nullptr, nullptr));
-    m_videoFrame.emplace(GstMappedFrame(sample, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)));
     if (!*m_videoFrame) {
         // If mapping failed, clear the GstMappedFrame holder.
         m_videoFrame = std::nullopt;
@@ -254,10 +249,7 @@ std::unique_ptr<CoordinatedPlatformLayer
             yuvToRgbColorSpace = CoordinatedPlatformLayerBufferYUV::YuvToRgbColorSpace::Bt709;
         else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(m_videoFrame->info()), GST_VIDEO_COLORIMETRY_BT2020))
             yuvToRgbColorSpace = CoordinatedPlatformLayerBufferYUV::YuvToRgbColorSpace::Bt2020;
-        else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(m_videoFrame->info()), GST_VIDEO_COLORIMETRY_BT2100_PQ)) {
-            yuvToRgbColorSpace = CoordinatedPlatformLayerBufferYUV::YuvToRgbColorSpace::Bt2020;
-            transferFunction = CoordinatedPlatformLayerBufferYUV::TransferFunction::Pq;
-        } else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(m_videoFrame->info()), GST_VIDEO_COLORIMETRY_SMPTE240M))
+        else if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(m_videoFrame->info()), GST_VIDEO_COLORIMETRY_SMPTE240M))
             yuvToRgbColorSpace = CoordinatedPlatformLayerBufferYUV::YuvToRgbColorSpace::Smpte240M;
 
         return CoordinatedPlatformLayerBufferYUV::create(numberOfPlanes, WTF::move(planes), WTF::move(yuvPlane), WTF::move(yuvPlaneOffset), yuvToRgbColorSpace, transferFunction, m_size, m_flags, nullptr);
Index: webkitgtk-2.52.0/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/gstreamer/GStreamerCodecUtilities.cpp
@@ -254,9 +254,10 @@ static std::pair<GRefPtr<GstCaps>, GRefP
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT709;
     else if (transfer == VPConfigurationTransferCharacteristics::BT_470_7_BG)
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_GAMMA28;
-    else if (transfer == VPConfigurationTransferCharacteristics::BT_601_7)
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT601;
-    else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_240)
+    else if (transfer == VPConfigurationTransferCharacteristics::BT_601_7) {
+        GST_WARNING("VPConfigurationTransferCharacteristics::BT_601_7 not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+    } else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_240)
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE240M;
     else if (transfer == VPConfigurationTransferCharacteristics::Linear)
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_GAMMA10;
@@ -269,17 +270,21 @@ static std::pair<GRefPtr<GstCaps>, GRefP
     else if (transfer == VPConfigurationTransferCharacteristics::IEC_61966_2_1) {
         GST_WARNING("VPConfigurationTransferCharacteristics::IEC_61966_2_1 not supported");
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
-    } else if (transfer == VPConfigurationTransferCharacteristics::BT_2020_10bit)
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
-    else if (transfer == VPConfigurationTransferCharacteristics::BT_2020_12bit)
+    } else if (transfer == VPConfigurationTransferCharacteristics::BT_2020_10bit) {
+        GST_WARNING("VPConfigurationTransferCharacteristics::BT_2020_10bit not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+    } else if (transfer == VPConfigurationTransferCharacteristics::BT_2020_12bit)
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
-    else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_2084)
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE2084;
-    else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_428_1) {
+    else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_2084) {
+        GST_WARNING("VPConfigurationTransferCharacteristics::SMPTE_ST_2084 not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+    } else if (transfer == VPConfigurationTransferCharacteristics::SMPTE_ST_428_1) {
         GST_WARNING("VPConfigurationTransferCharacteristics::SMPTE_ST_428_1 not supported");
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
-    } else if (transfer == VPConfigurationTransferCharacteristics::BT_2100_HLG)
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67;
+    } else if (transfer == VPConfigurationTransferCharacteristics::BT_2100_HLG) {
+        GST_WARNING("VPConfigurationTransferCharacteristics::BT_2100_HLG not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+    }
 
     auto matrix = parameters->matrixCoefficients;
     if (matrix == VPConfigurationMatrixCoefficients::Identity)
@@ -422,7 +427,8 @@ static std::pair<GRefPtr<GstCaps>, GRefP
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_GAMMA28;
         break;
     case AV1ConfigurationTransferCharacteristics::BT_601_7:
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT601;
+        GST_WARNING("AV1ConfigurationTransferCharacteristics::BT_601_7 not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
         break;
     case AV1ConfigurationTransferCharacteristics::SMPTE_ST_240:
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE240M;
@@ -446,20 +452,23 @@ static std::pair<GRefPtr<GstCaps>, GRefP
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
         break;
     case AV1ConfigurationTransferCharacteristics::BT_2020_10bit:
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
+        GST_WARNING("AV1ConfigurationTransferCharacteristics::BT_2020_10bit not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
         break;
     case AV1ConfigurationTransferCharacteristics::BT_2020_12bit:
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
         break;
     case AV1ConfigurationTransferCharacteristics::SMPTE_ST_2084:
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_SMPTE2084;
+        GST_WARNING("AV1ConfigurationTransferCharacteristics::SMPTE_ST_2084 not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
         break;
     case AV1ConfigurationTransferCharacteristics::SMPTE_ST_428_1:
         GST_WARNING("AV1ConfigurationTransferCharacteristics::SMPTE_ST_428_1 not supported");
         GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
         break;
     case AV1ConfigurationTransferCharacteristics::BT_2100_HLG:
-        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67;
+        GST_WARNING("AV1ConfigurationTransferCharacteristics::BT_2100_HLG not supported");
+        GST_VIDEO_INFO_COLORIMETRY(&info).transfer = GST_VIDEO_TRANSFER_UNKNOWN;
         break;
     };
 
Index: webkitgtk-2.52.0/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp
===================================================================
--- webkitgtk-2.52.0.orig/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp
+++ webkitgtk-2.52.0/Source/WebCore/platform/gstreamer/VideoEncoderPrivateGStreamer.cpp
@@ -889,22 +889,6 @@ static void webkit_video_encoder_class_i
                 "temporal-scalability-rate-decimator", decimators.get(),
                 "temporal-scalability-target-bitrate", bitrates.get(), nullptr);
 
-            if (layerFlags) {
-                GValue layerSyncFlagsValue = G_VALUE_INIT;
-
-                g_value_init(&boolValue, G_TYPE_BOOLEAN);
-                gst_value_array_init(&layerSyncFlagsValue, layerSyncFlags.size());
-                for (auto& flag : layerSyncFlags) {
-                    g_value_set_boolean(&boolValue, flag);
-                    gst_value_array_append_value(&layerSyncFlagsValue, &boolValue);
-                }
-
-                g_object_set_property(G_OBJECT(encoder), "temporal-scalability-layer-sync-flags", &layerSyncFlagsValue);
-                g_value_unset(&layerSyncFlagsValue);
-                g_value_unset(&boolValue);
-                gst_util_set_object_arg(G_OBJECT(encoder), "temporal-scalability-layer-flags", layerFlags.characters());
-            }
-
             ALLOW_DEPRECATED_DECLARATIONS_END;
         });
 
