Diffstat (limited to 'src'): 22 files changed, 1136 insertions, 68 deletions
diff --git a/src/audio_core/audio_renderer.h b/src/audio_core/audio_renderer.h
index be1b019f1..c0fae669e 100644
--- a/src/audio_core/audio_renderer.h
+++ b/src/audio_core/audio_renderer.h
@@ -189,7 +189,7 @@ struct UpdateDataHeader {
     UpdateDataHeader() {}
 
     explicit UpdateDataHeader(const AudioRendererParameter& config) {
-        revision = Common::MakeMagic('R', 'E', 'V', '4'); // 5.1.0 Revision
+        revision = Common::MakeMagic('R', 'E', 'V', '8'); // 9.2.0 Revision
         behavior_size = 0xb0;
         memory_pools_size = (config.effect_count + (config.voice_count * 4)) * 0x10;
         voices_size = config.voice_count * 0x10;
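Note: the revision constant above is built with Common::MakeMagic, which packs four ASCII characters into a little-endian u32 (so 'R','E','V','8' reads back as "REV8" in memory). The helper's real definition lives elsewhere in the common library and is not part of this diff; the following is only an illustrative sketch of that packing:

    #include <cstdint>

    using u32 = std::uint32_t;

    // Illustrative sketch: first character ends up in the lowest byte.
    constexpr u32 MakeMagic(char a, char b, char c, char d) {
        return static_cast<u32>(static_cast<unsigned char>(a)) |
               (static_cast<u32>(static_cast<unsigned char>(b)) << 8) |
               (static_cast<u32>(static_cast<unsigned char>(c)) << 16) |
               (static_cast<u32>(static_cast<unsigned char>(d)) << 24);
    }

    static_assert(MakeMagic('R', 'E', 'V', '8') == 0x38564552);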
diff --git a/src/core/hle/service/am/am.cpp b/src/core/hle/service/am/am.cpp
index d1bf13c89..557608e76 100644
--- a/src/core/hle/service/am/am.cpp
+++ b/src/core/hle/service/am/am.cpp
@@ -52,6 +52,11 @@ enum class LaunchParameterKind : u32 {
     AccountPreselectedUser = 2,
 };
 
+enum class VrMode : u8 {
+    Disabled = 0,
+    Enabled = 1,
+};
+
 constexpr u32 LAUNCH_PARAMETER_ACCOUNT_PRESELECTED_USER_MAGIC = 0xC79497CA;
 
 struct LaunchParameterAccountPreselectedUser {
@@ -605,11 +610,11 @@ ICommonStateGetter::ICommonStateGetter(Core::System& system,
         {30, nullptr, "GetHomeButtonReaderLockAccessor"},
         {31, nullptr, "GetReaderLockAccessorEx"},
         {40, nullptr, "GetCradleFwVersion"},
-        {50, nullptr, "IsVrModeEnabled"},
-        {51, nullptr, "SetVrModeEnabled"},
+        {50, &ICommonStateGetter::IsVrModeEnabled, "IsVrModeEnabled"},
+        {51, &ICommonStateGetter::SetVrModeEnabled, "SetVrModeEnabled"},
         {52, &ICommonStateGetter::SetLcdBacklighOffEnabled, "SetLcdBacklighOffEnabled"},
         {53, nullptr, "BeginVrModeEx"},
-        {54, nullptr, "EndVrModeEx"},
+        {54, &ICommonStateGetter::EndVrModeEx, "EndVrModeEx"},
         {55, nullptr, "IsInControllerFirmwareUpdateSection"},
         {60, &ICommonStateGetter::GetDefaultDisplayResolution, "GetDefaultDisplayResolution"},
         {61, &ICommonStateGetter::GetDefaultDisplayResolutionChangeEvent, "GetDefaultDisplayResolutionChangeEvent"},
@@ -672,6 +677,30 @@ void ICommonStateGetter::GetCurrentFocusState(Kernel::HLERequestContext& ctx) {
     rb.Push(static_cast<u8>(FocusState::InFocus));
 }
 
+void ICommonStateGetter::IsVrModeEnabled(Kernel::HLERequestContext& ctx) {
+    LOG_WARNING(Service_AM, "(STUBBED) called");
+
+    IPC::ResponseBuilder rb{ctx, 3};
+    rb.Push(RESULT_SUCCESS);
+    rb.PushEnum(VrMode::Disabled);
+}
+
+void ICommonStateGetter::SetVrModeEnabled(Kernel::HLERequestContext& ctx) {
+    IPC::RequestParser rp{ctx};
+    const auto is_vr_mode_enabled = rp.Pop<bool>();
+
+    LOG_WARNING(Service_AM, "(STUBBED) called. is_vr_mode_enabled={}", is_vr_mode_enabled);
+
+    IPC::ResponseBuilder rb{ctx, 2};
+    if (!is_vr_mode_enabled) {
+        rb.Push(RESULT_SUCCESS);
+    } else {
+        // TODO: Find better error code for this
+        UNIMPLEMENTED_MSG("is_vr_mode_enabled={}", is_vr_mode_enabled);
+        rb.Push(RESULT_UNKNOWN);
+    }
+}
+
 void ICommonStateGetter::SetLcdBacklighOffEnabled(Kernel::HLERequestContext& ctx) {
     IPC::RequestParser rp{ctx};
     const auto is_lcd_backlight_off_enabled = rp.Pop<bool>();
@@ -683,6 +712,13 @@ void ICommonStateGetter::SetLcdBacklighOffEnabled(Kernel::HLERequestContext& ctx
     rb.Push(RESULT_SUCCESS);
 }
 
+void ICommonStateGetter::EndVrModeEx(Kernel::HLERequestContext& ctx) {
+    LOG_WARNING(Service_AM, "(STUBBED) called");
+
+    IPC::ResponseBuilder rb{ctx, 2};
+    rb.Push(RESULT_SUCCESS);
+}
+
 void ICommonStateGetter::GetDefaultDisplayResolutionChangeEvent(Kernel::HLERequestContext& ctx) {
     LOG_DEBUG(Service_AM, "called");
 
diff --git a/src/core/hle/service/am/am.h b/src/core/hle/service/am/am.h
index 0843de781..53cfce10f 100644
--- a/src/core/hle/service/am/am.h
+++ b/src/core/hle/service/am/am.h
@@ -182,7 +182,10 @@ private:
     void GetOperationMode(Kernel::HLERequestContext& ctx);
     void GetPerformanceMode(Kernel::HLERequestContext& ctx);
     void GetBootMode(Kernel::HLERequestContext& ctx);
+    void IsVrModeEnabled(Kernel::HLERequestContext& ctx);
+    void SetVrModeEnabled(Kernel::HLERequestContext& ctx);
     void SetLcdBacklighOffEnabled(Kernel::HLERequestContext& ctx);
+    void EndVrModeEx(Kernel::HLERequestContext& ctx);
     void GetDefaultDisplayResolution(Kernel::HLERequestContext& ctx);
     void SetCpuBoostMode(Kernel::HLERequestContext& ctx);
 
diff --git a/src/core/hle/service/hid/hid.cpp b/src/core/hle/service/hid/hid.cpp
index e6b56a9f9..d6ed5f304 100644
--- a/src/core/hle/service/hid/hid.cpp
+++ b/src/core/hle/service/hid/hid.cpp
@@ -235,7 +235,7 @@ Hid::Hid(Core::System& system) : ServiceFramework("hid"), system(system) {
         {303, nullptr, "ActivateSevenSixAxisSensor"},
         {304, nullptr, "StartSevenSixAxisSensor"},
         {305, nullptr, "StopSevenSixAxisSensor"},
-        {306, nullptr, "InitializeSevenSixAxisSensor"},
+        {306, &Hid::InitializeSevenSixAxisSensor, "InitializeSevenSixAxisSensor"},
         {307, nullptr, "FinalizeSevenSixAxisSensor"},
         {308, nullptr, "SetSevenSixAxisSensorFusionStrength"},
         {309, nullptr, "GetSevenSixAxisSensorFusionStrength"},
@@ -853,6 +853,13 @@ void Hid::SetPalmaBoostMode(Kernel::HLERequestContext& ctx) {
     rb.Push(RESULT_SUCCESS);
 }
 
+void Hid::InitializeSevenSixAxisSensor(Kernel::HLERequestContext& ctx) {
+    LOG_WARNING(Service_HID, "(STUBBED) called");
+
+    IPC::ResponseBuilder rb{ctx, 2};
+    rb.Push(RESULT_SUCCESS);
+}
+
 class HidDbg final : public ServiceFramework<HidDbg> {
 public:
     explicit HidDbg() : ServiceFramework{"hid:dbg"} {
diff --git a/src/core/hle/service/hid/hid.h b/src/core/hle/service/hid/hid.h
index ad20f147c..039c38b58 100644
--- a/src/core/hle/service/hid/hid.h
+++ b/src/core/hle/service/hid/hid.h
@@ -128,6 +128,7 @@ private:
     void StopSixAxisSensor(Kernel::HLERequestContext& ctx);
    void SetIsPalmaAllConnectable(Kernel::HLERequestContext& ctx);
     void SetPalmaBoostMode(Kernel::HLERequestContext& ctx);
+    void InitializeSevenSixAxisSensor(Kernel::HLERequestContext& ctx);
 
     std::shared_ptr<IAppletResource> applet_resource;
     Core::System& system;
diff --git a/src/core/hle/service/time/interface.cpp b/src/core/hle/service/time/interface.cpp
index 1660bbdb8..f509653a3 100644
--- a/src/core/hle/service/time/interface.cpp
+++ b/src/core/hle/service/time/interface.cpp
@@ -30,7 +30,7 @@ Time::Time(std::shared_ptr<Module> module, Core::System& system, const char* nam
         {400, &Time::GetClockSnapshot, "GetClockSnapshot"},
         {401, &Time::GetClockSnapshotFromSystemClockContext, "GetClockSnapshotFromSystemClockContext"},
         {500, nullptr, "CalculateStandardUserSystemClockDifferenceByUser"},
-        {501, nullptr, "CalculateSpanBetween"},
+        {501, &Time::CalculateSpanBetween, "CalculateSpanBetween"},
     };
     // clang-format on
 
diff --git a/src/core/hle/service/time/time.cpp b/src/core/hle/service/time/time.cpp
index 749b7be70..ce859f18d 100644
--- a/src/core/hle/service/time/time.cpp
+++ b/src/core/hle/service/time/time.cpp
@@ -308,6 +308,35 @@ void Module::Interface::GetClockSnapshotFromSystemClockContext(Kernel::HLEReques
     ctx.WriteBuffer(&clock_snapshot, sizeof(Clock::ClockSnapshot));
 }
 
+void Module::Interface::CalculateSpanBetween(Kernel::HLERequestContext& ctx) {
+    LOG_DEBUG(Service_Time, "called");
+
+    IPC::RequestParser rp{ctx};
+    const auto snapshot_a = rp.PopRaw<Clock::ClockSnapshot>();
+    const auto snapshot_b = rp.PopRaw<Clock::ClockSnapshot>();
+
+    Clock::TimeSpanType time_span_type{};
+    s64 span{};
+    if (const ResultCode result{snapshot_a.steady_clock_time_point.GetSpanBetween(
+            snapshot_b.steady_clock_time_point, span)};
+        result != RESULT_SUCCESS) {
+        if (snapshot_a.network_time && snapshot_b.network_time) {
+            time_span_type =
+                Clock::TimeSpanType::FromSeconds(snapshot_b.network_time - snapshot_a.network_time);
+        } else {
+            IPC::ResponseBuilder rb{ctx, 2};
+            rb.Push(ERROR_TIME_NOT_FOUND);
+            return;
+        }
+    } else {
+        time_span_type = Clock::TimeSpanType::FromSeconds(span);
+    }
+
+    IPC::ResponseBuilder rb{ctx, (sizeof(s64) / 4) + 2};
+    rb.Push(RESULT_SUCCESS);
+    rb.PushRaw(time_span_type.nanoseconds);
+}
+
 void Module::Interface::GetSharedMemoryNativeHandle(Kernel::HLERequestContext& ctx) {
     LOG_DEBUG(Service_Time, "called");
     IPC::ResponseBuilder rb{ctx, 2, 1};
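Note: in CalculateSpanBetween the response size works out to (sizeof(s64) / 4) + 2 = 4 words, i.e. two words for the result code plus two for the 64-bit nanosecond span. Clock::TimeSpanType itself is defined outside this diff; a minimal stand-in, assuming it simply stores a nanosecond count as its usage above suggests, would look like:

    #include <cstdint>

    using s64 = std::int64_t;

    // Illustrative stand-in for Clock::TimeSpanType; names follow the usage in the hunk above.
    struct TimeSpanType {
        s64 nanoseconds{};

        static constexpr TimeSpanType FromSeconds(s64 seconds) {
            return {seconds * 1'000'000'000};
        }
    };

    static_assert(TimeSpanType::FromSeconds(2).nanoseconds == 2'000'000'000);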
diff --git a/src/core/hle/service/time/time.h b/src/core/hle/service/time/time.h
index aadc2df60..351988468 100644
--- a/src/core/hle/service/time/time.h
+++ b/src/core/hle/service/time/time.h
@@ -32,6 +32,7 @@ public:
     void CalculateMonotonicSystemClockBaseTimePoint(Kernel::HLERequestContext& ctx);
     void GetClockSnapshot(Kernel::HLERequestContext& ctx);
     void GetClockSnapshotFromSystemClockContext(Kernel::HLERequestContext& ctx);
+    void CalculateSpanBetween(Kernel::HLERequestContext& ctx);
     void GetSharedMemoryNativeHandle(Kernel::HLERequestContext& ctx);
 
 private:
diff --git a/src/video_core/CMakeLists.txt b/src/video_core/CMakeLists.txt
index 91df062d7..effe76a63 100644
--- a/src/video_core/CMakeLists.txt
+++ b/src/video_core/CMakeLists.txt
@@ -210,6 +210,8 @@ if (ENABLE_VULKAN)
         renderer_vulkan/vk_texture_cache.h
         renderer_vulkan/vk_update_descriptor.cpp
         renderer_vulkan/vk_update_descriptor.h
+        renderer_vulkan/wrapper.cpp
+        renderer_vulkan/wrapper.h
     )
 
     target_include_directories(video_core PRIVATE sirit ../../externals/Vulkan-Headers/include)
diff --git a/src/video_core/engines/const_buffer_engine_interface.h b/src/video_core/engines/const_buffer_engine_interface.h
index 724ee0fd6..ebe139504 100644
--- a/src/video_core/engines/const_buffer_engine_interface.h
+++ b/src/video_core/engines/const_buffer_engine_interface.h
@@ -18,10 +18,14 @@ struct SamplerDescriptor {
     union {
         u32 raw = 0;
         BitField<0, 2, Tegra::Shader::TextureType> texture_type;
-        BitField<2, 3, Tegra::Texture::ComponentType> component_type;
+        BitField<2, 3, Tegra::Texture::ComponentType> r_type;
         BitField<5, 1, u32> is_array;
         BitField<6, 1, u32> is_buffer;
         BitField<7, 1, u32> is_shadow;
+        BitField<8, 3, Tegra::Texture::ComponentType> g_type;
+        BitField<11, 3, Tegra::Texture::ComponentType> b_type;
+        BitField<14, 3, Tegra::Texture::ComponentType> a_type;
+        BitField<17, 7, Tegra::Texture::TextureFormat> format;
     };
 
     bool operator==(const SamplerDescriptor& rhs) const noexcept {
@@ -36,9 +40,11 @@ struct SamplerDescriptor {
     using Tegra::Shader::TextureType;
     SamplerDescriptor result;
 
-    // This is going to be used to determine the shading language type.
-    // Because of that we don't care about all component types on color textures.
-    result.component_type.Assign(tic.r_type.Value());
+    result.format.Assign(tic.format.Value());
+    result.r_type.Assign(tic.r_type.Value());
+    result.g_type.Assign(tic.g_type.Value());
+    result.b_type.Assign(tic.b_type.Value());
+    result.a_type.Assign(tic.a_type.Value());
 
     switch (tic.texture_type.Value()) {
     case Tegra::Texture::TextureType::Texture1D:
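Note: SamplerDescriptor now packs per-channel component types and the texture format into the same 32-bit raw value. For illustration only, the bit layout implied by the BitField declarations above can be decoded with plain shifts and masks:

    #include <cstdint>

    using u32 = std::uint32_t;

    // Fields follow BitField<position, size, type> from the hunk above.
    struct DecodedSampler {
        u32 texture_type; // bits 0-1
        u32 r_type;       // bits 2-4
        u32 is_array;     // bit 5
        u32 is_buffer;    // bit 6
        u32 is_shadow;    // bit 7
        u32 g_type;       // bits 8-10
        u32 b_type;       // bits 11-13
        u32 a_type;       // bits 14-16
        u32 format;       // bits 17-23
    };

    constexpr DecodedSampler Decode(u32 raw) {
        return {
            raw & 0b11,          (raw >> 2) & 0b111,  (raw >> 5) & 1,
            (raw >> 6) & 1,      (raw >> 7) & 1,      (raw >> 8) & 0b111,
            (raw >> 11) & 0b111, (raw >> 14) & 0b111, (raw >> 17) & 0x7f,
        };
    }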
diff --git a/src/video_core/engines/shader_bytecode.h b/src/video_core/engines/shader_bytecode.h
index 49dc5abe0..930b605af 100644
--- a/src/video_core/engines/shader_bytecode.h
+++ b/src/video_core/engines/shader_bytecode.h
@@ -231,18 +231,6 @@ enum class AtomicOp : u64 {
     Or = 6,
     Xor = 7,
     Exch = 8,
-};
-
-enum class GlobalAtomicOp : u64 {
-    Add = 0,
-    Min = 1,
-    Max = 2,
-    Inc = 3,
-    Dec = 4,
-    And = 5,
-    Or = 6,
-    Xor = 7,
-    Exch = 8,
     SafeAdd = 10,
 };
 
@@ -1001,7 +989,7 @@ union Instruction {
     } stg;
 
     union {
-        BitField<52, 4, GlobalAtomicOp> operation;
+        BitField<52, 4, AtomicOp> operation;
         BitField<49, 3, GlobalAtomicType> type;
         BitField<28, 20, s64> offset;
     } atom;
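Note: with GlobalAtomicOp removed, ATOM decoding reuses AtomicOp through BitField<52, 4, AtomicOp>, i.e. the operation lives in bits 52-55 of the 64-bit opcode. A self-contained sketch of that extraction (illustrative, not the emulator's actual decoder):

    #include <cstdint>

    using u64 = std::uint64_t;

    enum class AtomicOp : u64 {
        Add = 0, Min = 1, Max = 2, Inc = 3, Dec = 4,
        And = 5, Or = 6, Xor = 7, Exch = 8, SafeAdd = 10,
    };

    // Equivalent to BitField<52, 4, AtomicOp>: take 4 bits starting at bit 52.
    constexpr AtomicOp DecodeAtomOperation(u64 instruction) {
        return static_cast<AtomicOp>((instruction >> 52) & 0xF);
    }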
diff --git a/src/video_core/renderer_opengl/gl_rasterizer.cpp b/src/video_core/renderer_opengl/gl_rasterizer.cpp
index 826eee7df..31add708f 100644
--- a/src/video_core/renderer_opengl/gl_rasterizer.cpp
+++ b/src/video_core/renderer_opengl/gl_rasterizer.cpp
@@ -444,6 +444,7 @@ void RasterizerOpenGL::Clear() {
     }
 
     SyncRasterizeEnable();
+    SyncStencilTestState();
 
     if (regs.clear_flags.scissor) {
         SyncScissorTest();
@@ -1052,12 +1053,8 @@ void RasterizerOpenGL::SyncStencilTestState() {
     flags[Dirty::StencilTest] = false;
 
     const auto& regs = gpu.regs;
-    if (!regs.stencil_enable) {
-        glDisable(GL_STENCIL_TEST);
-        return;
-    }
+    oglEnable(GL_STENCIL_TEST, regs.stencil_enable);
 
-    glEnable(GL_STENCIL_TEST);
     glStencilFuncSeparate(GL_FRONT, MaxwellToGL::ComparisonOp(regs.stencil_front_func_func),
                           regs.stencil_front_func_ref, regs.stencil_front_func_mask);
     glStencilOpSeparate(GL_FRONT, MaxwellToGL::StencilOp(regs.stencil_front_op_fail),
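Note: SyncStencilTestState now always syncs the stencil functions and toggles GL_STENCIL_TEST through an oglEnable helper instead of early-returning when the test is disabled. The helper's definition is elsewhere in the OpenGL backend and not shown in this diff; a hypothetical sketch of what it presumably does:

    #include <glad/glad.h> // assumed: the GL loader header used by the backend

    // Hypothetical helper: enable or disable an OpenGL capability from a boolean register.
    void oglEnable(GLenum cap, bool state) {
        if (state) {
            glEnable(cap);
        } else {
            glDisable(cap);
        }
    }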
diff --git a/src/video_core/renderer_opengl/gl_shader_decompiler.cpp b/src/video_core/renderer_opengl/gl_shader_decompiler.cpp
index 8aa4a7ac9..c7d24cf14 100644
--- a/src/video_core/renderer_opengl/gl_shader_decompiler.cpp
+++ b/src/video_core/renderer_opengl/gl_shader_decompiler.cpp
@@ -2114,6 +2114,10 @@ private:
 
     template <const std::string_view& opname, Type type>
     Expression Atomic(Operation operation) {
+        if ((opname == Func::Min || opname == Func::Max) && type == Type::Int) {
+            UNIMPLEMENTED_MSG("Unimplemented Min & Max for atomic operations");
+            return {};
+        }
         return {fmt::format("atomic{}({}, {})", opname, Visit(operation[0]).GetCode(),
                             Visit(operation[1]).As(type)),
                 type};
@@ -2307,6 +2311,8 @@ private:
     ~Func() = delete;
 
     static constexpr std::string_view Add = "Add";
+    static constexpr std::string_view Min = "Min";
+    static constexpr std::string_view Max = "Max";
     static constexpr std::string_view And = "And";
     static constexpr std::string_view Or = "Or";
     static constexpr std::string_view Xor = "Xor";
@@ -2457,7 +2463,21 @@ private:
        &GLSLDecompiler::AtomicImage<Func::Xor>,
        &GLSLDecompiler::AtomicImage<Func::Exchange>,
 
+        &GLSLDecompiler::Atomic<Func::Exchange, Type::Uint>,
         &GLSLDecompiler::Atomic<Func::Add, Type::Uint>,
+        &GLSLDecompiler::Atomic<Func::Min, Type::Uint>,
+        &GLSLDecompiler::Atomic<Func::Max, Type::Uint>,
+        &GLSLDecompiler::Atomic<Func::And, Type::Uint>,
+        &GLSLDecompiler::Atomic<Func::Or, Type::Uint>,
+        &GLSLDecompiler::Atomic<Func::Xor, Type::Uint>,
+
+        &GLSLDecompiler::Atomic<Func::Exchange, Type::Int>,
+        &GLSLDecompiler::Atomic<Func::Add, Type::Int>,
+        &GLSLDecompiler::Atomic<Func::Min, Type::Int>,
+        &GLSLDecompiler::Atomic<Func::Max, Type::Int>,
+        &GLSLDecompiler::Atomic<Func::And, Type::Int>,
+        &GLSLDecompiler::Atomic<Func::Or, Type::Int>,
+        &GLSLDecompiler::Atomic<Func::Xor, Type::Int>,
 
        &GLSLDecompiler::Branch,
        &GLSLDecompiler::BranchIndirect,
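Note: the GLSL Atomic helper composes the emitted call from the Func name, so each new table entry maps onto a standard GLSL built-in such as atomicMin or atomicXor (the signed Min/Max case is still guarded by UNIMPLEMENTED_MSG above). A small sketch of what the format string produces, using illustrative operand names rather than real decompiler output:

    #include <string>
    #include <fmt/format.h>

    int main() {
        // Mirrors the decompiler's format string: "atomic{}({}, {})".
        const std::string glsl = fmt::format("atomic{}({}, {})", "Min", "smem[0]", "value");
        fmt::print("{}\n", glsl); // prints: atomicMin(smem[0], value)
    }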
diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
index f93447610..7480cb7c3 100644
--- a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
+++ b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
@@ -401,6 +401,26 @@ vk::Format VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttr
         }
         break;
     case Maxwell::VertexAttribute::Type::SignedScaled:
+        switch (size) {
+        case Maxwell::VertexAttribute::Size::Size_8:
+            return vk::Format::eR8Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_8_8:
+            return vk::Format::eR8G8Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_8_8_8:
+            return vk::Format::eR8G8B8Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_8_8_8_8:
+            return vk::Format::eR8G8B8A8Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_16:
+            return vk::Format::eR16Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_16_16:
+            return vk::Format::eR16G16Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_16_16_16:
+            return vk::Format::eR16G16B16Sscaled;
+        case Maxwell::VertexAttribute::Size::Size_16_16_16_16:
+            return vk::Format::eR16G16B16A16Sscaled;
+        default:
+            break;
+        }
         break;
     case Maxwell::VertexAttribute::Type::Float:
         switch (size) {
diff --git a/src/video_core/renderer_vulkan/vk_shader_decompiler.cpp b/src/video_core/renderer_vulkan/vk_shader_decompiler.cpp
index 51ecb5567..d67f08cf9 100644
--- a/src/video_core/renderer_vulkan/vk_shader_decompiler.cpp
+++ b/src/video_core/renderer_vulkan/vk_shader_decompiler.cpp
@@ -1941,7 +1941,11 @@ private:
         return {};
     }
 
-    Expression AtomicAdd(Operation operation) {
+    template <Id (Module::*func)(Id, Id, Id, Id, Id), Type result_type,
+              Type value_type = result_type>
+    Expression Atomic(Operation operation) {
+        const Id type_def = GetTypeDefinition(result_type);
+
         Id pointer;
         if (const auto smem = std::get_if<SmemNode>(&*operation[0])) {
             pointer = GetSharedMemoryPointer(*smem);
@@ -1949,14 +1953,15 @@ private:
             pointer = GetGlobalMemoryPointer(*gmem);
         } else {
             UNREACHABLE();
-            return {Constant(t_uint, 0), Type::Uint};
+            return {Constant(type_def, 0), result_type};
         }
 
+        const Id value = As(Visit(operation[1]), value_type);
+
         const Id scope = Constant(t_uint, static_cast<u32>(spv::Scope::Device));
-        const Id semantics = Constant(t_uint, 0U);
+        const Id semantics = Constant(type_def, 0);
 
-        const Id value = AsUint(Visit(operation[1]));
-        return {OpAtomicIAdd(t_uint, pointer, scope, semantics, value), Type::Uint};
+        return {(this->*func)(type_def, pointer, scope, semantics, value), result_type};
     }
 
     Expression Branch(Operation operation) {
@@ -2545,7 +2550,21 @@ private:
        &SPIRVDecompiler::AtomicImageXor,
        &SPIRVDecompiler::AtomicImageExchange,
 
-        &SPIRVDecompiler::AtomicAdd,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicExchange, Type::Uint>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicIAdd, Type::Uint>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicUMin, Type::Uint>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicUMax, Type::Uint>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicAnd, Type::Uint>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicOr, Type::Uint>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicXor, Type::Uint>,
+
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicExchange, Type::Int>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicIAdd, Type::Int>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicSMin, Type::Int>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicSMax, Type::Int>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicAnd, Type::Int>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicOr, Type::Int>,
+        &SPIRVDecompiler::Atomic<&Module::OpAtomicXor, Type::Int>,
 
        &SPIRVDecompiler::Branch,
        &SPIRVDecompiler::BranchIndirect,
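Note: Atomic replaces the old AtomicAdd by taking the SPIR-V emitter to call as a pointer-to-member-function non-type template parameter, so one body serves every atomic opcode. A self-contained sketch of that C++ pattern with toy types (not the real sirit Module API):

    #include <iostream>

    // Toy stand-ins for the decompiler's Id/Module types (illustrative only).
    using Id = int;

    struct Module {
        Id OpAtomicIAdd(Id type, Id ptr, Id scope, Id semantics, Id value) { return value + 1; }
        Id OpAtomicAnd(Id type, Id ptr, Id scope, Id semantics, Id value) { return value & 1; }
    };

    struct Decompiler : Module {
        // The emitter is fixed at compile time, so one template stamps out every
        // atomic variant without duplicating the pointer/scope/semantics setup.
        template <Id (Module::*func)(Id, Id, Id, Id, Id)>
        Id Atomic(Id value) {
            const Id type = 0, pointer = 0, scope = 0, semantics = 0;
            return (this->*func)(type, pointer, scope, semantics, value);
        }
    };

    int main() {
        Decompiler dec;
        std::cout << dec.Atomic<&Module::OpAtomicIAdd>(41) << '\n'; // 42
        std::cout << dec.Atomic<&Module::OpAtomicAnd>(3) << '\n';   // 1
    }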
diff --git a/src/video_core/renderer_vulkan/wrapper.cpp b/src/video_core/renderer_vulkan/wrapper.cpp
new file mode 100644
index 000000000..c412b7f20
--- /dev/null
+++ b/src/video_core/renderer_vulkan/wrapper.cpp
@@ -0,0 +1,342 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <exception>
+#include <memory>
+#include <optional>
+#include <utility>
+#include <vector>
+
+#include "common/common_types.h"
+
+#include "video_core/renderer_vulkan/wrapper.h"
+
+namespace Vulkan::vk {
+
+namespace {
+
+template <typename T>
+bool Proc(T& result, const InstanceDispatch& dld, const char* proc_name,
+          VkInstance instance = nullptr) noexcept {
+    result = reinterpret_cast<T>(dld.vkGetInstanceProcAddr(instance, proc_name));
+    return result != nullptr;
+}
+
+template <typename T>
+void Proc(T& result, const DeviceDispatch& dld, const char* proc_name, VkDevice device) noexcept {
+    result = reinterpret_cast<T>(dld.vkGetDeviceProcAddr(device, proc_name));
+}
+
+void Load(VkDevice device, DeviceDispatch& dld) noexcept {
+#define X(name) Proc(dld.name, dld, #name, device)
+    X(vkAcquireNextImageKHR);
+    X(vkAllocateCommandBuffers);
+    X(vkAllocateDescriptorSets);
+    X(vkAllocateMemory);
+    X(vkBeginCommandBuffer);
+    X(vkBindBufferMemory);
+    X(vkBindImageMemory);
+    X(vkCmdBeginQuery);
+    X(vkCmdBeginRenderPass);
+    X(vkCmdBeginTransformFeedbackEXT);
+    X(vkCmdBindDescriptorSets);
+    X(vkCmdBindIndexBuffer);
+    X(vkCmdBindPipeline);
+    X(vkCmdBindTransformFeedbackBuffersEXT);
+    X(vkCmdBindVertexBuffers);
+    X(vkCmdBlitImage);
+    X(vkCmdClearAttachments);
+    X(vkCmdCopyBuffer);
+    X(vkCmdCopyBufferToImage);
+    X(vkCmdCopyImage);
+    X(vkCmdCopyImageToBuffer);
+    X(vkCmdDispatch);
+    X(vkCmdDraw);
+    X(vkCmdDrawIndexed);
+    X(vkCmdEndQuery);
+    X(vkCmdEndRenderPass);
+    X(vkCmdEndTransformFeedbackEXT);
+    X(vkCmdFillBuffer);
+    X(vkCmdPipelineBarrier);
+    X(vkCmdPushConstants);
+    X(vkCmdSetBlendConstants);
+    X(vkCmdSetCheckpointNV);
+    X(vkCmdSetDepthBias);
+    X(vkCmdSetDepthBounds);
+    X(vkCmdSetScissor);
+    X(vkCmdSetStencilCompareMask);
+    X(vkCmdSetStencilReference);
+    X(vkCmdSetStencilWriteMask);
+    X(vkCmdSetViewport);
+    X(vkCreateBuffer);
+    X(vkCreateBufferView);
+    X(vkCreateCommandPool);
+    X(vkCreateComputePipelines);
+    X(vkCreateDescriptorPool);
+    X(vkCreateDescriptorSetLayout);
+    X(vkCreateDescriptorUpdateTemplateKHR);
+    X(vkCreateFence);
+    X(vkCreateFramebuffer);
+    X(vkCreateGraphicsPipelines);
+    X(vkCreateImage);
+    X(vkCreateImageView);
+    X(vkCreatePipelineLayout);
+    X(vkCreateQueryPool);
+    X(vkCreateRenderPass);
+    X(vkCreateSampler);
+    X(vkCreateSemaphore);
+    X(vkCreateShaderModule);
+    X(vkCreateSwapchainKHR);
+    X(vkDestroyBuffer);
+    X(vkDestroyBufferView);
+    X(vkDestroyCommandPool);
+    X(vkDestroyDescriptorPool);
+    X(vkDestroyDescriptorSetLayout);
+    X(vkDestroyDescriptorUpdateTemplateKHR);
+    X(vkDestroyFence);
+    X(vkDestroyFramebuffer);
+    X(vkDestroyImage);
+    X(vkDestroyImageView);
+    X(vkDestroyPipeline);
+    X(vkDestroyPipelineLayout);
+    X(vkDestroyQueryPool);
+    X(vkDestroyRenderPass);
+    X(vkDestroySampler);
+    X(vkDestroySemaphore);
+    X(vkDestroyShaderModule);
+    X(vkDestroySwapchainKHR);
+    X(vkDeviceWaitIdle);
+    X(vkEndCommandBuffer);
+    X(vkFreeCommandBuffers);
+    X(vkFreeDescriptorSets);
+    X(vkFreeMemory);
+    X(vkGetBufferMemoryRequirements);
+    X(vkGetDeviceQueue);
+    X(vkGetFenceStatus);
+    X(vkGetImageMemoryRequirements);
+    X(vkGetQueryPoolResults);
+    X(vkGetQueueCheckpointDataNV);
+    X(vkMapMemory);
+    X(vkQueueSubmit);
+    X(vkResetFences);
+    X(vkResetQueryPoolEXT);
+    X(vkUnmapMemory);
+    X(vkUpdateDescriptorSetWithTemplateKHR);
+    X(vkUpdateDescriptorSets);
+    X(vkWaitForFences);
+#undef X
+}
+
+} // Anonymous namespace
+
+bool Load(InstanceDispatch& dld) noexcept {
+#define X(name) Proc(dld.name, dld, #name)
+    return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties);
+#undef X
+}
+
+bool Load(VkInstance instance, InstanceDispatch& dld) noexcept {
+#define X(name) Proc(dld.name, dld, #name, instance)
+    // These functions may fail to load depending on the enabled extensions.
+    // Don't return a failure on these.
+    X(vkCreateDebugUtilsMessengerEXT);
+    X(vkDestroyDebugUtilsMessengerEXT);
+    X(vkDestroySurfaceKHR);
+    X(vkGetPhysicalDeviceFeatures2KHR);
+    X(vkGetPhysicalDeviceProperties2KHR);
+    X(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
+    X(vkGetPhysicalDeviceSurfaceFormatsKHR);
+    X(vkGetPhysicalDeviceSurfacePresentModesKHR);
+    X(vkGetPhysicalDeviceSurfaceSupportKHR);
+    X(vkGetSwapchainImagesKHR);
+    X(vkQueuePresentKHR);
+
+    return X(vkCreateDevice) && X(vkDestroyDevice) && X(vkDestroyDevice) &&
+           X(vkEnumerateDeviceExtensionProperties) && X(vkEnumeratePhysicalDevices) &&
+           X(vkGetDeviceProcAddr) && X(vkGetPhysicalDeviceFormatProperties) &&
+           X(vkGetPhysicalDeviceMemoryProperties) && X(vkGetPhysicalDeviceProperties) &&
+           X(vkGetPhysicalDeviceQueueFamilyProperties);
+#undef X
+}
+
+const char* Exception::what() const noexcept {
+    return ToString(result);
+}
+
+const char* ToString(VkResult result) noexcept {
+    switch (result) {
+    case VkResult::VK_SUCCESS:
+        return "VK_SUCCESS";
+    case VkResult::VK_NOT_READY:
+        return "VK_NOT_READY";
+    case VkResult::VK_TIMEOUT:
+        return "VK_TIMEOUT";
+    case VkResult::VK_EVENT_SET:
+        return "VK_EVENT_SET";
+    case VkResult::VK_EVENT_RESET:
+        return "VK_EVENT_RESET";
+    case VkResult::VK_INCOMPLETE:
+        return "VK_INCOMPLETE";
+    case VkResult::VK_ERROR_OUT_OF_HOST_MEMORY:
+        return "VK_ERROR_OUT_OF_HOST_MEMORY";
+    case VkResult::VK_ERROR_OUT_OF_DEVICE_MEMORY:
+        return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
+    case VkResult::VK_ERROR_INITIALIZATION_FAILED:
+        return "VK_ERROR_INITIALIZATION_FAILED";
+    case VkResult::VK_ERROR_DEVICE_LOST:
+        return "VK_ERROR_DEVICE_LOST";
+    case VkResult::VK_ERROR_MEMORY_MAP_FAILED:
+        return "VK_ERROR_MEMORY_MAP_FAILED";
+    case VkResult::VK_ERROR_LAYER_NOT_PRESENT:
+        return "VK_ERROR_LAYER_NOT_PRESENT";
+    case VkResult::VK_ERROR_EXTENSION_NOT_PRESENT:
+        return "VK_ERROR_EXTENSION_NOT_PRESENT";
+    case VkResult::VK_ERROR_FEATURE_NOT_PRESENT:
+        return "VK_ERROR_FEATURE_NOT_PRESENT";
+    case VkResult::VK_ERROR_INCOMPATIBLE_DRIVER:
+        return "VK_ERROR_INCOMPATIBLE_DRIVER";
+    case VkResult::VK_ERROR_TOO_MANY_OBJECTS:
+        return "VK_ERROR_TOO_MANY_OBJECTS";
+    case VkResult::VK_ERROR_FORMAT_NOT_SUPPORTED:
+        return "VK_ERROR_FORMAT_NOT_SUPPORTED";
+    case VkResult::VK_ERROR_FRAGMENTED_POOL:
+        return "VK_ERROR_FRAGMENTED_POOL";
+    case VkResult::VK_ERROR_OUT_OF_POOL_MEMORY:
+        return "VK_ERROR_OUT_OF_POOL_MEMORY";
+    case VkResult::VK_ERROR_INVALID_EXTERNAL_HANDLE:
+        return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
+    case VkResult::VK_ERROR_SURFACE_LOST_KHR:
+        return "VK_ERROR_SURFACE_LOST_KHR";
+    case VkResult::VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
+        return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
+    case VkResult::VK_SUBOPTIMAL_KHR:
+        return "VK_SUBOPTIMAL_KHR";
+    case VkResult::VK_ERROR_OUT_OF_DATE_KHR:
+        return "VK_ERROR_OUT_OF_DATE_KHR";
+    case VkResult::VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
+        return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
+    case VkResult::VK_ERROR_VALIDATION_FAILED_EXT:
+        return "VK_ERROR_VALIDATION_FAILED_EXT";
+    case VkResult::VK_ERROR_INVALID_SHADER_NV:
+        return "VK_ERROR_INVALID_SHADER_NV";
+    case VkResult::VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
+        return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
+    case VkResult::VK_ERROR_FRAGMENTATION_EXT:
+        return "VK_ERROR_FRAGMENTATION_EXT";
+    case VkResult::VK_ERROR_NOT_PERMITTED_EXT:
+        return "VK_ERROR_NOT_PERMITTED_EXT";
+    case VkResult::VK_ERROR_INVALID_DEVICE_ADDRESS_EXT:
+        return "VK_ERROR_INVALID_DEVICE_ADDRESS_EXT";
+    case VkResult::VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
+        return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
+    }
+    return "Unknown";
+}
+
+void Destroy(VkInstance instance, const InstanceDispatch& dld) noexcept {
+    dld.vkDestroyInstance(instance, nullptr);
+}
+
+void Destroy(VkDevice device, const InstanceDispatch& dld) noexcept {
+    dld.vkDestroyDevice(device, nullptr);
+}
+
+void Destroy(VkDevice device, VkBuffer handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyBuffer(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkBufferView handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyBufferView(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkCommandPool handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyCommandPool(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkDescriptorPool handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyDescriptorPool(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkDescriptorSetLayout handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyDescriptorSetLayout(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkDescriptorUpdateTemplateKHR handle,
+             const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyDescriptorUpdateTemplateKHR(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkDeviceMemory handle, const DeviceDispatch& dld) noexcept {
+    dld.vkFreeMemory(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkFence handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyFence(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkFramebuffer handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyFramebuffer(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkImage handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyImage(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkImageView handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyImageView(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkPipeline handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyPipeline(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkPipelineLayout handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyPipelineLayout(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkQueryPool handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyQueryPool(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkRenderPass handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyRenderPass(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkSampler handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroySampler(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkSwapchainKHR handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroySwapchainKHR(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkSemaphore handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroySemaphore(device, handle, nullptr);
+}
+
+void Destroy(VkDevice device, VkShaderModule handle, const DeviceDispatch& dld) noexcept {
+    dld.vkDestroyShaderModule(device, handle, nullptr);
+}
+
+void Destroy(VkInstance instance, VkDebugUtilsMessengerEXT handle,
+             const InstanceDispatch& dld) noexcept {
+    dld.vkDestroyDebugUtilsMessengerEXT(instance, handle, nullptr);
+}
+
+void Destroy(VkInstance instance, VkSurfaceKHR handle, const InstanceDispatch& dld) noexcept {
+    dld.vkDestroySurfaceKHR(instance, handle, nullptr);
+}
+
+VkResult Free(VkDevice device, VkDescriptorPool handle, Span<VkDescriptorSet> sets,
+              const DeviceDispatch& dld) noexcept {
+    return dld.vkFreeDescriptorSets(device, handle, sets.size(), sets.data());
+}
+
+VkResult Free(VkDevice device, VkCommandPool handle, Span<VkCommandBuffer> buffers,
+              const DeviceDispatch& dld) noexcept {
+    dld.vkFreeCommandBuffers(device, handle, buffers.size(), buffers.data());
+    return VK_SUCCESS;
+}
+
+} // namespace Vulkan::vk
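Note: the dispatch tables are filled in stages: bootstrap vkGetInstanceProcAddr, load the global entry points, then the instance-level ones (device-level pointers are resolved later through vkGetDeviceProcAddr). A hedged sketch of how a caller might drive the public Load overloads; obtaining the loader entry point and creating the instance are outside this diff and only assumed here:

    #include "video_core/renderer_vulkan/wrapper.h"

    // Illustrative only; error handling and instance creation details are elided.
    bool LoadDispatch(PFN_vkGetInstanceProcAddr loader, VkInstance instance,
                      Vulkan::vk::InstanceDispatch& dld) {
        dld.vkGetInstanceProcAddr = loader;
        // Stage 1: global functions (vkCreateInstance, extension enumeration).
        if (!Vulkan::vk::Load(dld)) {
            return false;
        }
        // Stage 2: instance-level functions, once `instance` has been created
        // by the caller through dld.vkCreateInstance.
        return Vulkan::vk::Load(instance, dld);
    }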
diff --git a/src/video_core/renderer_vulkan/wrapper.h b/src/video_core/renderer_vulkan/wrapper.h new file mode 100644 index 000000000..686c2b9a1 --- /dev/null +++ b/src/video_core/renderer_vulkan/wrapper.h | |||
| @@ -0,0 +1,545 @@ | |||
| 1 | // Copyright 2020 yuzu Emulator Project | ||
| 2 | // Licensed under GPLv2 or any later version | ||
| 3 | // Refer to the license.txt file included. | ||
| 4 | |||
| 5 | #pragma once | ||
| 6 | |||
| 7 | #include <exception> | ||
| 8 | #include <iterator> | ||
| 9 | #include <limits> | ||
| 10 | #include <memory> | ||
| 11 | #include <optional> | ||
| 12 | #include <type_traits> | ||
| 13 | #include <utility> | ||
| 14 | #include <vector> | ||
| 15 | |||
| 16 | #define VK_NO_PROTOTYPES | ||
| 17 | #include <vulkan/vulkan.h> | ||
| 18 | |||
| 19 | #include "common/common_types.h" | ||
| 20 | |||
| 21 | namespace Vulkan::vk { | ||
| 22 | |||
| 23 | /** | ||
| 24 | * Span for Vulkan arrays. | ||
| 25 | * Based on std::span but optimized for array access instead of iterators. | ||
| 26 | * Size returns uint32_t instead of size_t to ease interaction with Vulkan functions. | ||
| 27 | */ | ||
| 28 | template <typename T> | ||
| 29 | class Span { | ||
| 30 | public: | ||
| 31 | using value_type = T; | ||
| 32 | using size_type = u32; | ||
| 33 | using difference_type = std::ptrdiff_t; | ||
| 34 | using reference = const T&; | ||
| 35 | using const_reference = const T&; | ||
| 36 | using pointer = const T*; | ||
| 37 | using const_pointer = const T*; | ||
| 38 | using iterator = const T*; | ||
| 39 | using const_iterator = const T*; | ||
| 40 | |||
| 41 | /// Construct an empty span. | ||
| 42 | constexpr Span() noexcept = default; | ||
| 43 | |||
| 44 | /// Construct a span from a single element. | ||
| 45 | constexpr Span(const T& value) noexcept : ptr{&value}, num{1} {} | ||
| 46 | |||
| 47 | /// Construct a span from a range. | ||
| 48 | template <typename Range> | ||
| 49 | // requires std::data(const Range&) | ||
| 50 | // requires std::size(const Range&) | ||
| 51 | constexpr Span(const Range& range) : ptr{std::data(range)}, num{std::size(range)} {} | ||
| 52 | |||
| 53 | /// Construct a span from a pointer and a size. | ||
| 54 | /// This is inteded for subranges. | ||
| 55 | constexpr Span(const T* ptr, std::size_t num) noexcept : ptr{ptr}, num{num} {} | ||
| 56 | |||
| 57 | /// Returns the data pointer by the span. | ||
| 58 | constexpr const T* data() const noexcept { | ||
| 59 | return ptr; | ||
| 60 | } | ||
| 61 | |||
| 62 | /// Returns the number of elements in the span. | ||
| 63 | /// @note Returns a 32 bits integer because most Vulkan functions expect this type. | ||
| 64 | constexpr u32 size() const noexcept { | ||
| 65 | return static_cast<u32>(num); | ||
| 66 | } | ||
| 67 | |||
| 68 | /// Returns true when the span is empty. | ||
| 69 | constexpr bool empty() const noexcept { | ||
| 70 | return num == 0; | ||
| 71 | } | ||
| 72 | |||
| 73 | /// Returns a reference to the element in the passed index. | ||
| 74 | /// @pre: index < size() | ||
| 75 | constexpr const T& operator[](std::size_t index) const noexcept { | ||
| 76 | return ptr[index]; | ||
| 77 | } | ||
| 78 | |||
| 79 | /// Returns an iterator to the beginning of the span. | ||
| 80 | constexpr const T* begin() const noexcept { | ||
| 81 | return ptr; | ||
| 82 | } | ||
| 83 | |||
| 84 | /// Returns an iterator to the end of the span. | ||
| 85 | constexpr const T* end() const noexcept { | ||
| 86 | return ptr + num; | ||
| 87 | } | ||
| 88 | |||
| 89 | /// Returns an iterator to the beginning of the span. | ||
| 90 | constexpr const T* cbegin() const noexcept { | ||
| 91 | return ptr; | ||
| 92 | } | ||
| 93 | |||
| 94 | /// Returns an iterator to the end of the span. | ||
| 95 | constexpr const T* cend() const noexcept { | ||
| 96 | return ptr + num; | ||
| 97 | } | ||
| 98 | |||
| 99 | private: | ||
| 100 | const T* ptr = nullptr; | ||
| 101 | std::size_t num = 0; | ||
| 102 | }; | ||
| 103 | |||
| 104 | /// Vulkan exception generated from a VkResult. | ||
| 105 | class Exception final : public std::exception { | ||
| 106 | public: | ||
| 107 | /// Construct the exception with a result. | ||
| 108 | /// @pre result != VK_SUCCESS | ||
| 109 | explicit Exception(VkResult result_) : result{result_} {} | ||
| 110 | virtual ~Exception() = default; | ||
| 111 | |||
| 112 | const char* what() const noexcept override; | ||
| 113 | |||
| 114 | private: | ||
| 115 | VkResult result; | ||
| 116 | }; | ||
| 117 | |||
| 118 | /// Converts a VkResult enum into a rodata string | ||
| 119 | const char* ToString(VkResult) noexcept; | ||
| 120 | |||
| 121 | /// Throws a Vulkan exception if result is not success. | ||
| 122 | inline void Check(VkResult result) { | ||
| 123 | if (result != VK_SUCCESS) { | ||
| 124 | throw Exception(result); | ||
| 125 | } | ||
| 126 | } | ||
| 127 | |||
| 128 | /// Throws a Vulkan exception if result is an error. | ||
| 129 | /// @return result | ||
| 130 | inline VkResult Filter(VkResult result) { | ||
| 131 | if (result < 0) { | ||
| 132 | throw Exception(result); | ||
| 133 | } | ||
| 134 | return result; | ||
| 135 | } | ||
| 136 | |||
| 137 | /// Table holding Vulkan instance function pointers. | ||
| 138 | struct InstanceDispatch { | ||
| 139 | PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; | ||
| 140 | |||
| 141 | PFN_vkCreateInstance vkCreateInstance; | ||
| 142 | PFN_vkDestroyInstance vkDestroyInstance; | ||
| 143 | PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; | ||
| 144 | |||
| 145 | PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT; | ||
| 146 | PFN_vkCreateDevice vkCreateDevice; | ||
| 147 | PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT; | ||
| 148 | PFN_vkDestroyDevice vkDestroyDevice; | ||
| 149 | PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; | ||
| 150 | PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; | ||
| 151 | PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; | ||
| 152 | PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; | ||
| 153 | PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; | ||
| 154 | PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; | ||
| 155 | PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; | ||
| 156 | PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; | ||
| 157 | PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; | ||
| 158 | PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; | ||
| 159 | PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; | ||
| 160 | PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; | ||
| 161 | PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; | ||
| 162 | PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; | ||
| 163 | PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; | ||
| 164 | PFN_vkQueuePresentKHR vkQueuePresentKHR; | ||
| 165 | }; | ||
| 166 | |||
| 167 | /// Table holding Vulkan device function pointers. | ||
| 168 | struct DeviceDispatch : public InstanceDispatch { | ||
| 169 | PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; | ||
| 170 | PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; | ||
| 171 | PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; | ||
| 172 | PFN_vkAllocateMemory vkAllocateMemory; | ||
| 173 | PFN_vkBeginCommandBuffer vkBeginCommandBuffer; | ||
| 174 | PFN_vkBindBufferMemory vkBindBufferMemory; | ||
| 175 | PFN_vkBindImageMemory vkBindImageMemory; | ||
| 176 | PFN_vkCmdBeginQuery vkCmdBeginQuery; | ||
| 177 | PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; | ||
| 178 | PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; | ||
| 179 | PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; | ||
| 180 | PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; | ||
| 181 | PFN_vkCmdBindPipeline vkCmdBindPipeline; | ||
| 182 | PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; | ||
| 183 | PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; | ||
| 184 | PFN_vkCmdBlitImage vkCmdBlitImage; | ||
| 185 | PFN_vkCmdClearAttachments vkCmdClearAttachments; | ||
| 186 | PFN_vkCmdCopyBuffer vkCmdCopyBuffer; | ||
| 187 | PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; | ||
| 188 | PFN_vkCmdCopyImage vkCmdCopyImage; | ||
| 189 | PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; | ||
| 190 | PFN_vkCmdDispatch vkCmdDispatch; | ||
| 191 | PFN_vkCmdDraw vkCmdDraw; | ||
| 192 | PFN_vkCmdDrawIndexed vkCmdDrawIndexed; | ||
| 193 | PFN_vkCmdEndQuery vkCmdEndQuery; | ||
| 194 | PFN_vkCmdEndRenderPass vkCmdEndRenderPass; | ||
| 195 | PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; | ||
| 196 | PFN_vkCmdFillBuffer vkCmdFillBuffer; | ||
| 197 | PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; | ||
| 198 | PFN_vkCmdPushConstants vkCmdPushConstants; | ||
| 199 | PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; | ||
| 200 | PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV; | ||
| 201 | PFN_vkCmdSetDepthBias vkCmdSetDepthBias; | ||
| 202 | PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; | ||
| 203 | PFN_vkCmdSetScissor vkCmdSetScissor; | ||
| 204 | PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; | ||
| 205 | PFN_vkCmdSetStencilReference vkCmdSetStencilReference; | ||
| 206 | PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; | ||
| 207 | PFN_vkCmdSetViewport vkCmdSetViewport; | ||
| 208 | PFN_vkCreateBuffer vkCreateBuffer; | ||
| 209 | PFN_vkCreateBufferView vkCreateBufferView; | ||
| 210 | PFN_vkCreateCommandPool vkCreateCommandPool; | ||
| 211 | PFN_vkCreateComputePipelines vkCreateComputePipelines; | ||
| 212 | PFN_vkCreateDescriptorPool vkCreateDescriptorPool; | ||
| 213 | PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; | ||
| 214 | PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; | ||
| 215 | PFN_vkCreateFence vkCreateFence; | ||
| 216 | PFN_vkCreateFramebuffer vkCreateFramebuffer; | ||
| 217 | PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; | ||
| 218 | PFN_vkCreateImage vkCreateImage; | ||
| 219 | PFN_vkCreateImageView vkCreateImageView; | ||
| 220 | PFN_vkCreatePipelineLayout vkCreatePipelineLayout; | ||
| 221 | PFN_vkCreateQueryPool vkCreateQueryPool; | ||
| 222 | PFN_vkCreateRenderPass vkCreateRenderPass; | ||
| 223 | PFN_vkCreateSampler vkCreateSampler; | ||
| 224 | PFN_vkCreateSemaphore vkCreateSemaphore; | ||
| 225 | PFN_vkCreateShaderModule vkCreateShaderModule; | ||
| 226 | PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; | ||
| 227 | PFN_vkDestroyBuffer vkDestroyBuffer; | ||
| 228 | PFN_vkDestroyBufferView vkDestroyBufferView; | ||
| 229 | PFN_vkDestroyCommandPool vkDestroyCommandPool; | ||
| 230 | PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; | ||
| 231 | PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; | ||
| 232 | PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; | ||
| 233 | PFN_vkDestroyFence vkDestroyFence; | ||
| 234 | PFN_vkDestroyFramebuffer vkDestroyFramebuffer; | ||
| 235 | PFN_vkDestroyImage vkDestroyImage; | ||
| 236 | PFN_vkDestroyImageView vkDestroyImageView; | ||
| 237 | PFN_vkDestroyPipeline vkDestroyPipeline; | ||
| 238 | PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; | ||
| 239 | PFN_vkDestroyQueryPool vkDestroyQueryPool; | ||
| 240 | PFN_vkDestroyRenderPass vkDestroyRenderPass; | ||
| 241 | PFN_vkDestroySampler vkDestroySampler; | ||
| 242 | PFN_vkDestroySemaphore vkDestroySemaphore; | ||
| 243 | PFN_vkDestroyShaderModule vkDestroyShaderModule; | ||
| 244 | PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; | ||
| 245 | PFN_vkDeviceWaitIdle vkDeviceWaitIdle; | ||
| 246 | PFN_vkEndCommandBuffer vkEndCommandBuffer; | ||
| 247 | PFN_vkFreeCommandBuffers vkFreeCommandBuffers; | ||
| 248 | PFN_vkFreeDescriptorSets vkFreeDescriptorSets; | ||
| 249 | PFN_vkFreeMemory vkFreeMemory; | ||
| 250 | PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; | ||
| 251 | PFN_vkGetDeviceQueue vkGetDeviceQueue; | ||
| 252 | PFN_vkGetFenceStatus vkGetFenceStatus; | ||
| 253 | PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; | ||
| 254 | PFN_vkGetQueryPoolResults vkGetQueryPoolResults; | ||
| 255 | PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV; | ||
| 256 | PFN_vkMapMemory vkMapMemory; | ||
| 257 | PFN_vkQueueSubmit vkQueueSubmit; | ||
| 258 | PFN_vkResetFences vkResetFences; | ||
| 259 | PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; | ||
| 260 | PFN_vkUnmapMemory vkUnmapMemory; | ||
| 261 | PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; | ||
| 262 | PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; | ||
| 263 | PFN_vkWaitForFences vkWaitForFences; | ||
| 264 | }; | ||
| 265 | |||
| 266 | /// Loads instance agnostic function pointers. | ||
| 267 | /// @return True on success, false on error. | ||
| 268 | bool Load(InstanceDispatch&) noexcept; | ||
| 269 | |||
| 270 | /// Loads instance function pointers. | ||
| 271 | /// @return True on success, false on error. | ||
| 272 | bool Load(VkInstance, InstanceDispatch&) noexcept; | ||
| 273 | |||
| 274 | void Destroy(VkInstance, const InstanceDispatch&) noexcept; | ||
| 275 | void Destroy(VkDevice, const InstanceDispatch&) noexcept; | ||
| 276 | |||
| 277 | void Destroy(VkDevice, VkBuffer, const DeviceDispatch&) noexcept; | ||
| 278 | void Destroy(VkDevice, VkBufferView, const DeviceDispatch&) noexcept; | ||
| 279 | void Destroy(VkDevice, VkCommandPool, const DeviceDispatch&) noexcept; | ||
| 280 | void Destroy(VkDevice, VkDescriptorPool, const DeviceDispatch&) noexcept; | ||
| 281 | void Destroy(VkDevice, VkDescriptorSetLayout, const DeviceDispatch&) noexcept; | ||
| 282 | void Destroy(VkDevice, VkDescriptorUpdateTemplateKHR, const DeviceDispatch&) noexcept; | ||
| 283 | void Destroy(VkDevice, VkDeviceMemory, const DeviceDispatch&) noexcept; | ||
| 284 | void Destroy(VkDevice, VkFence, const DeviceDispatch&) noexcept; | ||
| 285 | void Destroy(VkDevice, VkFramebuffer, const DeviceDispatch&) noexcept; | ||
| 286 | void Destroy(VkDevice, VkImage, const DeviceDispatch&) noexcept; | ||
| 287 | void Destroy(VkDevice, VkImageView, const DeviceDispatch&) noexcept; | ||
| 288 | void Destroy(VkDevice, VkPipeline, const DeviceDispatch&) noexcept; | ||
| 289 | void Destroy(VkDevice, VkPipelineLayout, const DeviceDispatch&) noexcept; | ||
| 290 | void Destroy(VkDevice, VkQueryPool, const DeviceDispatch&) noexcept; | ||
| 291 | void Destroy(VkDevice, VkRenderPass, const DeviceDispatch&) noexcept; | ||
| 292 | void Destroy(VkDevice, VkSampler, const DeviceDispatch&) noexcept; | ||
| 293 | void Destroy(VkDevice, VkSwapchainKHR, const DeviceDispatch&) noexcept; | ||
| 294 | void Destroy(VkDevice, VkSemaphore, const DeviceDispatch&) noexcept; | ||
| 295 | void Destroy(VkDevice, VkShaderModule, const DeviceDispatch&) noexcept; | ||
| 296 | void Destroy(VkInstance, VkDebugUtilsMessengerEXT, const InstanceDispatch&) noexcept; | ||
| 297 | void Destroy(VkInstance, VkSurfaceKHR, const InstanceDispatch&) noexcept; | ||
| 298 | |||
| 299 | VkResult Free(VkDevice, VkDescriptorPool, Span<VkDescriptorSet>, const DeviceDispatch&) noexcept; | ||
| 300 | VkResult Free(VkDevice, VkCommandPool, Span<VkCommandBuffer>, const DeviceDispatch&) noexcept; | ||
| 301 | |||
| 302 | template <typename Type, typename OwnerType, typename Dispatch> | ||
| 303 | class Handle; | ||
| 304 | |||
| 305 | /// Handle with an owning type. | ||
| 306 | /// Analogue to std::unique_ptr. | ||
| 307 | template <typename Type, typename OwnerType, typename Dispatch> | ||
| 308 | class Handle { | ||
| 309 | public: | ||
| 310 | /// Construct a handle and hold its ownership. | ||
| 311 | explicit Handle(Type handle_, OwnerType owner_, const Dispatch& dld_) noexcept | ||
| 312 | : handle{handle_}, owner{owner_}, dld{&dld_} {} | ||
| 313 | |||
| 314 | /// Construct an empty handle. | ||
| 315 | Handle() = default; | ||
| 316 | |||
| 317 | /// Copying Vulkan objects is not supported and will never be. | ||
| 318 | Handle(const Handle&) = delete; | ||
| 319 | Handle& operator=(const Handle&) = delete; | ||
| 320 | |||
| 321 | /// Construct a handle transferring ownership from another handle. | ||
| 322 | Handle(Handle&& rhs) noexcept | ||
| 323 | : handle{std::exchange(rhs.handle, nullptr)}, owner{rhs.owner}, dld{rhs.dld} {} | ||
| 324 | |||
| 325 | /// Assign the current handle transferring ownership from another handle. | ||
| 326 | /// Destroys any previously held object. | ||
| 327 | Handle& operator=(Handle&& rhs) noexcept { | ||
| 328 | Release(); | ||
| 329 | handle = std::exchange(rhs.handle, nullptr); | ||
| 330 | owner = rhs.owner; | ||
| 331 | dld = rhs.dld; | ||
| 332 | return *this; | ||
| 333 | } | ||
| 334 | |||
| 335 | /// Destroys the current handle if it existed. | ||
| 336 | ~Handle() noexcept { | ||
| 337 | Release(); | ||
| 338 | } | ||
| 339 | |||
| 340 | /// Destroys any held object. | ||
| 341 | void reset() noexcept { | ||
| 342 | Release(); | ||
| 343 | handle = nullptr; | ||
| 344 | } | ||
| 345 | |||
| 346 | /// Returns the address of the held object. | ||
| 347 | /// Intended for Vulkan structures that expect a pointer to an array. | ||
| 348 | const Type* address() const noexcept { | ||
| 349 | return std::addressof(handle); | ||
| 350 | } | ||
| 351 | |||
| 352 | /// Returns the held Vulkan handle. | ||
| 353 | Type operator*() const noexcept { | ||
| 354 | return handle; | ||
| 355 | } | ||
| 356 | |||
| 357 | /// Returns true when there's a held object. | ||
| 358 | explicit operator bool() const noexcept { | ||
| 359 | return handle != nullptr; | ||
| 360 | } | ||
| 361 | |||
| 362 | protected: | ||
| 363 | Type handle = nullptr; | ||
| 364 | OwnerType owner = nullptr; | ||
| 365 | const Dispatch* dld = nullptr; | ||
| 366 | |||
| 367 | private: | ||
| 368 | /// Destroys the held object if it exists. | ||
| 369 | void Release() noexcept { | ||
| 370 | if (handle) { | ||
| 371 | Destroy(owner, handle, *dld); | ||
| 372 | } | ||
| 373 | } | ||
| 374 | }; | ||
| 375 | |||
| 376 | /// Dummy type used to specify a handle has no owner. | ||
| 377 | struct NoOwner {}; | ||
| 378 | |||
| 379 | /// Handle without an owning type. | ||
| 380 | /// Analogue to std::unique_ptr | ||
| 381 | template <typename Type, typename Dispatch> | ||
| 382 | class Handle<Type, NoOwner, Dispatch> { | ||
| 383 | public: | ||
| 384 | /// Construct a handle and hold its ownership. | ||
| 385 | explicit Handle(Type handle_, const Dispatch& dld_) noexcept : handle{handle_}, dld{&dld_} {} | ||
| 386 | |||
| 387 | /// Construct an empty handle. | ||
| 388 | Handle() noexcept = default; | ||
| 389 | |||
| 390 | /// Copying Vulkan objects is not supported and will never be. | ||
| 391 | Handle(const Handle&) = delete; | ||
| 392 | Handle& operator=(const Handle&) = delete; | ||
| 393 | |||
| 394 | /// Construct a handle transferring ownership from another handle. | ||
| 395 | Handle(Handle&& rhs) noexcept : handle{std::exchange(rhs.handle, nullptr)}, dld{rhs.dld} {} | ||
| 396 | |||
| 397 | /// Assign the current handle transferring ownership from another handle. | ||
| 398 | /// Destroys any previously held object. | ||
| 399 | Handle& operator=(Handle&& rhs) noexcept { | ||
| 400 | Release(); | ||
| 401 | handle = std::exchange(rhs.handle, nullptr); | ||
| 402 | dld = rhs.dld; | ||
| 403 | return *this; | ||
| 404 | } | ||
| 405 | |||
| 406 | /// Destroys the current handle if it existed. | ||
| 407 | ~Handle() noexcept { | ||
| 408 | Release(); | ||
| 409 | } | ||
| 410 | |||
| 411 | /// Destroys any held object. | ||
| 412 | void reset() noexcept { | ||
| 413 | Release(); | ||
| 414 | handle = nullptr; | ||
| 415 | } | ||
| 416 | |||
| 417 | /// Returns the address of the held object. | ||
| 418 | /// Intended for Vulkan structures that expect a pointer to an array. | ||
| 419 | const Type* address() const noexcept { | ||
| 420 | return std::addressof(handle); | ||
| 421 | } | ||
| 422 | |||
| 423 | /// Returns the held Vulkan handle. | ||
| 424 | Type operator*() const noexcept { | ||
| 425 | return handle; | ||
| 426 | } | ||
| 427 | |||
| 428 | /// Returns true when there's a held object. | ||
| 429 | operator bool() const noexcept { | ||
| 430 | return handle != nullptr; | ||
| 431 | } | ||
| 432 | |||
| 433 | protected: | ||
| 434 | Type handle = nullptr; | ||
| 435 | const Dispatch* dld = nullptr; | ||
| 436 | |||
| 437 | private: | ||
| 438 | /// Destroys the held object if it exists. | ||
| 439 | void Release() noexcept { | ||
| 440 | if (handle) { | ||
| 441 | Destroy(handle, *dld); | ||
| 442 | } | ||
| 443 | } | ||
| 444 | }; | ||
| 445 | |||
| 446 | /// Array of objects allocated from a pool. | ||
| 447 | /// Analogue to std::vector | ||
| 448 | template <typename AllocationType, typename PoolType> | ||
| 449 | class PoolAllocations { | ||
| 450 | public: | ||
| 451 | /// Construct an empty allocation. | ||
| 452 | PoolAllocations() = default; | ||
| 453 | |||
| 454 | /// Construct an allocation. Errors are reported through IsOutOfPoolMemory(). | ||
| 455 | explicit PoolAllocations(std::unique_ptr<AllocationType[]> allocations, std::size_t num, | ||
| 456 | VkDevice device, PoolType pool, const DeviceDispatch& dld) noexcept | ||
| 457 | : allocations{std::move(allocations)}, num{num}, device{device}, pool{pool}, dld{&dld} {} | ||
| 458 | |||
| 459 | /// Copying Vulkan allocations is not supported and will never be. | ||
| 460 | PoolAllocations(const PoolAllocations&) = delete; | ||
| 461 | PoolAllocations& operator=(const PoolAllocations&) = delete; | ||
| 462 | |||
| 463 | /// Construct an allocation transferring ownership from another allocation. | ||
| 464 | PoolAllocations(PoolAllocations&& rhs) noexcept | ||
| 465 | : allocations{std::move(rhs.allocations)}, num{rhs.num}, device{rhs.device}, pool{rhs.pool}, | ||
| 466 | dld{rhs.dld} {} | ||
| 467 | |||
| 468 | /// Assign an allocation transferring ownership from another allocation. | ||
| 469 | /// Releases any previously held allocation. | ||
| 470 | PoolAllocations& operator=(PoolAllocations&& rhs) noexcept { | ||
| 471 | Release(); | ||
| 472 | allocations = std::move(rhs.allocations); | ||
| 473 | num = rhs.num; | ||
| 474 | device = rhs.device; | ||
| 475 | pool = rhs.pool; | ||
| 476 | dld = rhs.dld; | ||
| 477 | return *this; | ||
| 478 | } | ||
| 479 | |||
| 480 | /// Destroys any held allocation. | ||
| 481 | ~PoolAllocations() { | ||
| 482 | Release(); | ||
| 483 | } | ||
| 484 | |||
| 485 | /// Returns the number of allocations. | ||
| 486 | std::size_t size() const noexcept { | ||
| 487 | return num; | ||
| 488 | } | ||
| 489 | |||
| 490 | /// Returns a pointer to the array of allocations. | ||
| 491 | AllocationType const* data() const noexcept { | ||
| 492 | return allocations.get(); | ||
| 493 | } | ||
| 494 | |||
| 495 | /// Returns the allocation in the specified index. | ||
| 496 | /// @pre index < size() | ||
| 497 | AllocationType operator[](std::size_t index) const noexcept { | ||
| 498 | return allocations[index]; | ||
| 499 | } | ||
| 500 | |||
| 501 | /// Returns true when the pool allocation failed. | ||
| 502 | bool IsOutOfPoolMemory() const noexcept { | ||
| 503 | return !device; | ||
| 504 | } | ||
| 505 | |||
| 506 | private: | ||
| 507 | /// Destroys the held allocations if they exist. | ||
| 508 | void Release() noexcept { | ||
| 509 | if (!allocations) { | ||
| 510 | return; | ||
| 511 | } | ||
| 512 | const Span<AllocationType> span(allocations.get(), num); | ||
| 513 | const VkResult result = Free(device, pool, span, *dld); | ||
| 514 | // There's no way to report errors from a destructor. | ||
| 515 | if (result != VK_SUCCESS) { | ||
| 516 | std::terminate(); | ||
| 517 | } | ||
| 518 | } | ||
| 519 | |||
| 520 | std::unique_ptr<AllocationType[]> allocations; | ||
| 521 | std::size_t num = 0; | ||
| 522 | VkDevice device = nullptr; | ||
| 523 | PoolType pool = nullptr; | ||
| 524 | const DeviceDispatch* dld = nullptr; | ||
| 525 | }; | ||
| 526 | |||
| 527 | using BufferView = Handle<VkBufferView, VkDevice, DeviceDispatch>; | ||
| 528 | using DebugCallback = Handle<VkDebugUtilsMessengerEXT, VkInstance, InstanceDispatch>; | ||
| 529 | using DescriptorSetLayout = Handle<VkDescriptorSetLayout, VkDevice, DeviceDispatch>; | ||
| 530 | using DescriptorUpdateTemplateKHR = Handle<VkDescriptorUpdateTemplateKHR, VkDevice, DeviceDispatch>; | ||
| 531 | using Framebuffer = Handle<VkFramebuffer, VkDevice, DeviceDispatch>; | ||
| 532 | using ImageView = Handle<VkImageView, VkDevice, DeviceDispatch>; | ||
| 533 | using Pipeline = Handle<VkPipeline, VkDevice, DeviceDispatch>; | ||
| 534 | using PipelineLayout = Handle<VkPipelineLayout, VkDevice, DeviceDispatch>; | ||
| 535 | using QueryPool = Handle<VkQueryPool, VkDevice, DeviceDispatch>; | ||
| 536 | using RenderPass = Handle<VkRenderPass, VkDevice, DeviceDispatch>; | ||
| 537 | using Sampler = Handle<VkSampler, VkDevice, DeviceDispatch>; | ||
| 538 | using Semaphore = Handle<VkSemaphore, VkDevice, DeviceDispatch>; | ||
| 539 | using ShaderModule = Handle<VkShaderModule, VkDevice, DeviceDispatch>; | ||
| 540 | using SurfaceKHR = Handle<VkSurfaceKHR, VkInstance, InstanceDispatch>; | ||
| 541 | |||
| 542 | using DescriptorSets = PoolAllocations<VkDescriptorSet, VkDescriptorPool>; | ||
| 543 | using CommandBuffers = PoolAllocations<VkCommandBuffer, VkCommandPool>; | ||
| 544 | |||
| 545 | } // namespace Vulkan::vk | ||
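For orientation: the owning Handle above behaves like std::unique_ptr (the matching Destroy() overload runs on scope exit through the loaded dispatch table), and PoolAllocations plays the role of a fixed-size std::vector for pool-allocated objects. A minimal usage sketch, assuming a valid VkDevice, a loaded DeviceDispatch, and a raw VkSemaphore obtained elsewhere; the function and variable names below are illustrative and not part of this header:

    // Sketch only: assumes <vulkan/vulkan.h> and this wrapper header are included.
    void Example(VkDevice device, const Vulkan::vk::DeviceDispatch& dld,
                 VkSemaphore raw_semaphore) {
        namespace vk = Vulkan::vk;

        // Wrap the raw handle; the VkSemaphore Destroy() overload runs when
        // `semaphore` goes out of scope.
        vk::Semaphore semaphore(raw_semaphore, device, dld);

        // address() gives a pointer usable wherever Vulkan expects an array of handles.
        VkSubmitInfo submit_info{};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.signalSemaphoreCount = 1;
        submit_info.pSignalSemaphores = semaphore.address();

        // Ownership is move-only; the moved-from handle becomes empty.
        vk::Semaphore other = std::move(semaphore);
        if (!semaphore) {
            // nothing is held here any more
        }
    }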
diff --git a/src/video_core/shader/decode/arithmetic_integer.cpp b/src/video_core/shader/decode/arithmetic_integer.cpp index 2fe787d6f..0f4c3103a 100644 --- a/src/video_core/shader/decode/arithmetic_integer.cpp +++ b/src/video_core/shader/decode/arithmetic_integer.cpp | |||
| @@ -235,34 +235,30 @@ u32 ShaderIR::DecodeArithmeticInteger(NodeBlock& bb, u32 pc) { | |||
| 235 | case OpCode::Id::LEA_IMM: | 235 | case OpCode::Id::LEA_IMM: |
| 236 | case OpCode::Id::LEA_RZ: | 236 | case OpCode::Id::LEA_RZ: |
| 237 | case OpCode::Id::LEA_HI: { | 237 | case OpCode::Id::LEA_HI: { |
| 238 | const auto [op_a, op_b, op_c] = [&]() -> std::tuple<Node, Node, Node> { | 238 | auto [op_a, op_b, op_c] = [&]() -> std::tuple<Node, Node, Node> { |
| 239 | switch (opcode->get().GetId()) { | 239 | switch (opcode->get().GetId()) { |
| 240 | case OpCode::Id::LEA_R2: { | 240 | case OpCode::Id::LEA_R2: { |
| 241 | return {GetRegister(instr.gpr20), GetRegister(instr.gpr39), | 241 | return {GetRegister(instr.gpr20), GetRegister(instr.gpr39), |
| 242 | Immediate(static_cast<u32>(instr.lea.r2.entry_a))}; | 242 | Immediate(static_cast<u32>(instr.lea.r2.entry_a))}; |
| 243 | } | 243 | } |
| 244 | |||
| 245 | case OpCode::Id::LEA_R1: { | 244 | case OpCode::Id::LEA_R1: { |
| 246 | const bool neg = instr.lea.r1.neg != 0; | 245 | const bool neg = instr.lea.r1.neg != 0; |
| 247 | return {GetOperandAbsNegInteger(GetRegister(instr.gpr8), false, neg, true), | 246 | return {GetOperandAbsNegInteger(GetRegister(instr.gpr8), false, neg, true), |
| 248 | GetRegister(instr.gpr20), | 247 | GetRegister(instr.gpr20), |
| 249 | Immediate(static_cast<u32>(instr.lea.r1.entry_a))}; | 248 | Immediate(static_cast<u32>(instr.lea.r1.entry_a))}; |
| 250 | } | 249 | } |
| 251 | |||
| 252 | case OpCode::Id::LEA_IMM: { | 250 | case OpCode::Id::LEA_IMM: { |
| 253 | const bool neg = instr.lea.imm.neg != 0; | 251 | const bool neg = instr.lea.imm.neg != 0; |
| 254 | return {Immediate(static_cast<u32>(instr.lea.imm.entry_a)), | 252 | return {Immediate(static_cast<u32>(instr.lea.imm.entry_a)), |
| 255 | GetOperandAbsNegInteger(GetRegister(instr.gpr8), false, neg, true), | 253 | GetOperandAbsNegInteger(GetRegister(instr.gpr8), false, neg, true), |
| 256 | Immediate(static_cast<u32>(instr.lea.imm.entry_b))}; | 254 | Immediate(static_cast<u32>(instr.lea.imm.entry_b))}; |
| 257 | } | 255 | } |
| 258 | |||
| 259 | case OpCode::Id::LEA_RZ: { | 256 | case OpCode::Id::LEA_RZ: { |
| 260 | const bool neg = instr.lea.rz.neg != 0; | 257 | const bool neg = instr.lea.rz.neg != 0; |
| 261 | return {GetConstBuffer(instr.lea.rz.cb_index, instr.lea.rz.cb_offset), | 258 | return {GetConstBuffer(instr.lea.rz.cb_index, instr.lea.rz.cb_offset), |
| 262 | GetOperandAbsNegInteger(GetRegister(instr.gpr8), false, neg, true), | 259 | GetOperandAbsNegInteger(GetRegister(instr.gpr8), false, neg, true), |
| 263 | Immediate(static_cast<u32>(instr.lea.rz.entry_a))}; | 260 | Immediate(static_cast<u32>(instr.lea.rz.entry_a))}; |
| 264 | } | 261 | } |
| 265 | |||
| 266 | case OpCode::Id::LEA_HI: | 262 | case OpCode::Id::LEA_HI: |
| 267 | default: | 263 | default: |
| 268 | UNIMPLEMENTED_MSG("Unhandled LEA subinstruction: {}", opcode->get().GetName()); | 264 | UNIMPLEMENTED_MSG("Unhandled LEA subinstruction: {}", opcode->get().GetName()); |
| @@ -275,12 +271,9 @@ u32 ShaderIR::DecodeArithmeticInteger(NodeBlock& bb, u32 pc) { | |||
| 275 | UNIMPLEMENTED_IF_MSG(instr.lea.pred48 != static_cast<u64>(Pred::UnusedIndex), | 271 | UNIMPLEMENTED_IF_MSG(instr.lea.pred48 != static_cast<u64>(Pred::UnusedIndex), |
| 276 | "Unhandled LEA Predicate"); | 272 | "Unhandled LEA Predicate"); |
| 277 | 273 | ||
| 278 | const Node shifted_c = | 274 | Node value = Operation(OperationCode::ILogicalShiftLeft, std::move(op_a), std::move(op_c)); |
| 279 | Operation(OperationCode::ILogicalShiftLeft, NO_PRECISE, Immediate(1), op_c); | 275 | value = Operation(OperationCode::IAdd, std::move(op_b), std::move(value)); |
| 280 | const Node mul_bc = Operation(OperationCode::IMul, NO_PRECISE, op_b, shifted_c); | 276 | SetRegister(bb, instr.gpr0, std::move(value)); |
| 281 | const Node value = Operation(OperationCode::IAdd, NO_PRECISE, op_a, mul_bc); | ||
| 282 | |||
| 283 | SetRegister(bb, instr.gpr0, value); | ||
| 284 | 277 | ||
| 285 | break; | 278 | break; |
| 286 | } | 279 | } |
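For reference, the rewritten LEA path above emits a single shift followed by an add, so the destination receives op_b + (op_a << op_c), replacing the earlier Immediate(1) shift-and-multiply sequence. A scalar model of that arithmetic (plain C++, purely illustrative):

    #include <cstdint>

    // Scalar model of the new LEA lowering: shift the first operand, add the second.
    std::uint32_t LeaResult(std::uint32_t op_a, std::uint32_t op_b, std::uint32_t op_c) {
        return op_b + (op_a << op_c);
    }

    // Example: LeaResult(3, 0x1000, 4) == 0x1000 + (3u << 4) == 0x1030.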
diff --git a/src/video_core/shader/decode/memory.cpp b/src/video_core/shader/decode/memory.cpp index b5fbc4d58..28a49addd 100644 --- a/src/video_core/shader/decode/memory.cpp +++ b/src/video_core/shader/decode/memory.cpp | |||
| @@ -19,7 +19,6 @@ namespace VideoCommon::Shader { | |||
| 19 | using Tegra::Shader::AtomicOp; | 19 | using Tegra::Shader::AtomicOp; |
| 20 | using Tegra::Shader::AtomicType; | 20 | using Tegra::Shader::AtomicType; |
| 21 | using Tegra::Shader::Attribute; | 21 | using Tegra::Shader::Attribute; |
| 22 | using Tegra::Shader::GlobalAtomicOp; | ||
| 23 | using Tegra::Shader::GlobalAtomicType; | 22 | using Tegra::Shader::GlobalAtomicType; |
| 24 | using Tegra::Shader::Instruction; | 23 | using Tegra::Shader::Instruction; |
| 25 | using Tegra::Shader::OpCode; | 24 | using Tegra::Shader::OpCode; |
| @@ -28,6 +27,28 @@ using Tegra::Shader::StoreType; | |||
| 28 | 27 | ||
| 29 | namespace { | 28 | namespace { |
| 30 | 29 | ||
| 30 | Node GetAtomOperation(AtomicOp op, bool is_signed, Node memory, Node data) { | ||
| 31 | const OperationCode operation_code = [op] { | ||
| 32 | switch (op) { | ||
| 33 | case AtomicOp::Add: | ||
| 34 | return OperationCode::AtomicIAdd; | ||
| 35 | case AtomicOp::Min: | ||
| 36 | return OperationCode::AtomicIMin; | ||
| 37 | case AtomicOp::Max: | ||
| 38 | return OperationCode::AtomicIMax; | ||
| 39 | case AtomicOp::And: | ||
| 40 | return OperationCode::AtomicIAnd; | ||
| 41 | case AtomicOp::Or: | ||
| 42 | return OperationCode::AtomicIOr; | ||
| 43 | case AtomicOp::Xor: | ||
| 44 | return OperationCode::AtomicIXor; | ||
| 45 | case AtomicOp::Exch: | ||
| 46 | return OperationCode::AtomicIExchange; | ||
| 47 | } | ||
| 48 | }(); | ||
| 49 | return SignedOperation(operation_code, is_signed, std::move(memory), std::move(data)); | ||
| 50 | } | ||
| 51 | |||
| 31 | bool IsUnaligned(Tegra::Shader::UniformType uniform_type) { | 52 | bool IsUnaligned(Tegra::Shader::UniformType uniform_type) { |
| 32 | return uniform_type == Tegra::Shader::UniformType::UnsignedByte || | 53 | return uniform_type == Tegra::Shader::UniformType::UnsignedByte || |
| 33 | uniform_type == Tegra::Shader::UniformType::UnsignedShort; | 54 | uniform_type == Tegra::Shader::UniformType::UnsignedShort; |
| @@ -363,10 +384,13 @@ u32 ShaderIR::DecodeMemory(NodeBlock& bb, u32 pc) { | |||
| 363 | break; | 384 | break; |
| 364 | } | 385 | } |
| 365 | case OpCode::Id::ATOM: { | 386 | case OpCode::Id::ATOM: { |
| 366 | UNIMPLEMENTED_IF_MSG(instr.atom.operation != GlobalAtomicOp::Add, "operation={}", | 387 | UNIMPLEMENTED_IF_MSG(instr.atom.operation == AtomicOp::Inc || |
| 367 | static_cast<int>(instr.atom.operation.Value())); | 388 | instr.atom.operation == AtomicOp::Dec || |
| 368 | UNIMPLEMENTED_IF_MSG(instr.atom.type != GlobalAtomicType::S32, "type={}", | 389 | instr.atom.operation == AtomicOp::SafeAdd, |
| 369 | static_cast<int>(instr.atom.type.Value())); | 390 | "operation={}", static_cast<int>(instr.atom.operation.Value())); |
| 391 | UNIMPLEMENTED_IF_MSG(instr.atom.type == GlobalAtomicType::S64 || | ||
| 392 | instr.atom.type == GlobalAtomicType::U64, | ||
| 393 | "type={}", static_cast<int>(instr.atom.type.Value())); | ||
| 370 | 394 | ||
| 371 | const auto [real_address, base_address, descriptor] = | 395 | const auto [real_address, base_address, descriptor] = |
| 372 | TrackGlobalMemory(bb, instr, true, true); | 396 | TrackGlobalMemory(bb, instr, true, true); |
| @@ -375,25 +399,29 @@ u32 ShaderIR::DecodeMemory(NodeBlock& bb, u32 pc) { | |||
| 375 | break; | 399 | break; |
| 376 | } | 400 | } |
| 377 | 401 | ||
| 402 | const bool is_signed = | ||
| 403 | instr.atoms.type == AtomicType::S32 || instr.atoms.type == AtomicType::S64; | ||
| 378 | Node gmem = MakeNode<GmemNode>(real_address, base_address, descriptor); | 404 | Node gmem = MakeNode<GmemNode>(real_address, base_address, descriptor); |
| 379 | Node value = Operation(OperationCode::AtomicAdd, std::move(gmem), GetRegister(instr.gpr20)); | 405 | Node value = GetAtomOperation(static_cast<AtomicOp>(instr.atom.operation), is_signed, gmem, |
| 406 | GetRegister(instr.gpr20)); | ||
| 380 | SetRegister(bb, instr.gpr0, std::move(value)); | 407 | SetRegister(bb, instr.gpr0, std::move(value)); |
| 381 | break; | 408 | break; |
| 382 | } | 409 | } |
| 383 | case OpCode::Id::ATOMS: { | 410 | case OpCode::Id::ATOMS: { |
| 384 | UNIMPLEMENTED_IF_MSG(instr.atoms.operation != AtomicOp::Add, "operation={}", | 411 | UNIMPLEMENTED_IF_MSG(instr.atoms.operation == AtomicOp::Inc || |
| 385 | static_cast<int>(instr.atoms.operation.Value())); | 412 | instr.atoms.operation == AtomicOp::Dec, |
| 386 | UNIMPLEMENTED_IF_MSG(instr.atoms.type != AtomicType::U32, "type={}", | 413 | "operation={}", static_cast<int>(instr.atoms.operation.Value())); |
| 387 | static_cast<int>(instr.atoms.type.Value())); | 414 | UNIMPLEMENTED_IF_MSG(instr.atoms.type == AtomicType::S64 || |
| 388 | 415 | instr.atoms.type == AtomicType::U64, | |
| 416 | "type={}", static_cast<int>(instr.atoms.type.Value())); | ||
| 417 | const bool is_signed = | ||
| 418 | instr.atoms.type == AtomicType::S32 || instr.atoms.type == AtomicType::S64; | ||
| 389 | const s32 offset = instr.atoms.GetImmediateOffset(); | 419 | const s32 offset = instr.atoms.GetImmediateOffset(); |
| 390 | Node address = GetRegister(instr.gpr8); | 420 | Node address = GetRegister(instr.gpr8); |
| 391 | address = Operation(OperationCode::IAdd, std::move(address), Immediate(offset)); | 421 | address = Operation(OperationCode::IAdd, std::move(address), Immediate(offset)); |
| 392 | 422 | Node value = | |
| 393 | Node memory = GetSharedMemory(std::move(address)); | 423 | GetAtomOperation(static_cast<AtomicOp>(instr.atoms.operation), is_signed, |
| 394 | Node data = GetRegister(instr.gpr20); | 424 | GetSharedMemory(std::move(address)), GetRegister(instr.gpr20)); |
| 395 | |||
| 396 | Node value = Operation(OperationCode::AtomicAdd, std::move(memory), std::move(data)); | ||
| 397 | SetRegister(bb, instr.gpr0, std::move(value)); | 425 | SetRegister(bb, instr.gpr0, std::move(value)); |
| 398 | break; | 426 | break; |
| 399 | } | 427 | } |
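GetAtomOperation maps the guest AtomicOp onto the signed IR opcode and relies on SignedOperation to substitute the unsigned variant when is_signed is false (see the node_helper.cpp change below). As a rough reference for the expected behaviour, integer atomics of this kind are generally understood to return the previous memory value to the destination register while storing the combined value; a scalar sketch of that idea, not emulator code:

    #include <cstdint>

    // Reference semantics of an ATOMS-style integer atomic add on shared memory:
    // the old value goes to the destination register, the sum is stored back.
    std::uint32_t AtomicAddModel(std::uint32_t& shared_memory_word, std::uint32_t data) {
        const std::uint32_t old_value = shared_memory_word;
        shared_memory_word = old_value + data;
        return old_value;  // what would end up in gpr0
    }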
diff --git a/src/video_core/shader/node.h b/src/video_core/shader/node.h index a1828546e..5fcc9da60 100644 --- a/src/video_core/shader/node.h +++ b/src/video_core/shader/node.h | |||
| @@ -162,7 +162,21 @@ enum class OperationCode { | |||
| 162 | AtomicImageXor, /// (MetaImage, int[N] coords) -> void | 162 | AtomicImageXor, /// (MetaImage, int[N] coords) -> void |
| 163 | AtomicImageExchange, /// (MetaImage, int[N] coords) -> void | 163 | AtomicImageExchange, /// (MetaImage, int[N] coords) -> void |
| 164 | 164 | ||
| 165 | AtomicAdd, /// (memory, {u}int) -> {u}int | 165 | AtomicUExchange, /// (memory, uint) -> uint |
| 166 | AtomicUAdd, /// (memory, uint) -> uint | ||
| 167 | AtomicUMin, /// (memory, uint) -> uint | ||
| 168 | AtomicUMax, /// (memory, uint) -> uint | ||
| 169 | AtomicUAnd, /// (memory, uint) -> uint | ||
| 170 | AtomicUOr, /// (memory, uint) -> uint | ||
| 171 | AtomicUXor, /// (memory, uint) -> uint | ||
| 172 | |||
| 173 | AtomicIExchange, /// (memory, int) -> int | ||
| 174 | AtomicIAdd, /// (memory, int) -> int | ||
| 175 | AtomicIMin, /// (memory, int) -> int | ||
| 176 | AtomicIMax, /// (memory, int) -> int | ||
| 177 | AtomicIAnd, /// (memory, int) -> int | ||
| 178 | AtomicIOr, /// (memory, int) -> int | ||
| 179 | AtomicIXor, /// (memory, int) -> int | ||
| 166 | 180 | ||
| 167 | Branch, /// (uint branch_target) -> void | 181 | Branch, /// (uint branch_target) -> void |
| 168 | BranchIndirect, /// (uint branch_target) -> void | 182 | BranchIndirect, /// (uint branch_target) -> void |
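The split into AtomicI*/AtomicU* variants matters mainly for Min and Max, where the same bit pattern orders differently depending on signedness. A small worked example in plain C++:

    #include <algorithm>
    #include <cstdint>

    // 0xFFFFFFFF is 4294967295 as an unsigned value but -1 when reinterpreted as
    // signed, so the signed and unsigned minimum against 1 disagree.
    void MinExample() {
        const std::uint32_t bits = 0xFFFFFFFFu;
        const std::uint32_t unsigned_min = std::min(bits, 1u);                        // 1
        const std::int32_t signed_min = std::min(static_cast<std::int32_t>(bits), 1); // -1
        (void)unsigned_min;
        (void)signed_min;
    }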
diff --git a/src/video_core/shader/node_helper.cpp b/src/video_core/shader/node_helper.cpp index 76c56abb5..7bf4ff387 100644 --- a/src/video_core/shader/node_helper.cpp +++ b/src/video_core/shader/node_helper.cpp | |||
| @@ -86,6 +86,20 @@ OperationCode SignedToUnsignedCode(OperationCode operation_code, bool is_signed) | |||
| 86 | return OperationCode::LogicalUNotEqual; | 86 | return OperationCode::LogicalUNotEqual; |
| 87 | case OperationCode::LogicalIGreaterEqual: | 87 | case OperationCode::LogicalIGreaterEqual: |
| 88 | return OperationCode::LogicalUGreaterEqual; | 88 | return OperationCode::LogicalUGreaterEqual; |
| 89 | case OperationCode::AtomicIExchange: | ||
| 90 | return OperationCode::AtomicUExchange; | ||
| 91 | case OperationCode::AtomicIAdd: | ||
| 92 | return OperationCode::AtomicUAdd; | ||
| 93 | case OperationCode::AtomicIMin: | ||
| 94 | return OperationCode::AtomicUMin; | ||
| 95 | case OperationCode::AtomicIMax: | ||
| 96 | return OperationCode::AtomicUMax; | ||
| 97 | case OperationCode::AtomicIAnd: | ||
| 98 | return OperationCode::AtomicUAnd; | ||
| 99 | case OperationCode::AtomicIOr: | ||
| 100 | return OperationCode::AtomicUOr; | ||
| 101 | case OperationCode::AtomicIXor: | ||
| 102 | return OperationCode::AtomicUXor; | ||
| 89 | case OperationCode::INegate: | 103 | case OperationCode::INegate: |
| 90 | UNREACHABLE_MSG("Can't negate an unsigned integer"); | 104 | UNREACHABLE_MSG("Can't negate an unsigned integer"); |
| 91 | return {}; | 105 | return {}; |
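With the atomic cases added here, callers can always request the signed opcode and let SignedOperation rewrite it to the unsigned twin when the operands are unsigned. A minimal standalone model of that selection (illustrative only; the real function covers many more opcodes):

    // Toy enum and mapping mirroring the pattern above.
    enum class Op { AtomicIAdd, AtomicUAdd, AtomicIMin, AtomicUMin };

    Op SignedToUnsignedModel(Op op, bool is_signed) {
        if (is_signed) {
            return op;  // signed requests pass through unchanged
        }
        switch (op) {
        case Op::AtomicIAdd:
            return Op::AtomicUAdd;
        case Op::AtomicIMin:
            return Op::AtomicUMin;
        default:
            return op;
        }
    }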
diff --git a/src/yuzu/main.cpp b/src/yuzu/main.cpp index 47615adfe..d7e59d0cd 100644 --- a/src/yuzu/main.cpp +++ b/src/yuzu/main.cpp | |||
| @@ -1034,6 +1034,14 @@ void GMainWindow::BootGame(const QString& filename) { | |||
| 1034 | } | 1034 | } |
| 1035 | 1035 | ||
| 1036 | void GMainWindow::ShutdownGame() { | 1036 | void GMainWindow::ShutdownGame() { |
| 1037 | if (!emulation_running) { | ||
| 1038 | return; | ||
| 1039 | } | ||
| 1040 | |||
| 1041 | if (ui.action_Fullscreen->isChecked()) { | ||
| 1042 | HideFullscreen(); | ||
| 1043 | } | ||
| 1044 | |||
| 1037 | AllowOSSleep(); | 1045 | AllowOSSleep(); |
| 1038 | 1046 | ||
| 1039 | discord_rpc->Pause(); | 1047 | discord_rpc->Pause(); |
| @@ -1716,11 +1724,6 @@ void GMainWindow::OnStartGame() { | |||
| 1716 | } | 1724 | } |
| 1717 | 1725 | ||
| 1718 | void GMainWindow::OnPauseGame() { | 1726 | void GMainWindow::OnPauseGame() { |
| 1719 | Core::System& system{Core::System::GetInstance()}; | ||
| 1720 | if (system.GetExitLock() && !ConfirmForceLockedExit()) { | ||
| 1721 | return; | ||
| 1722 | } | ||
| 1723 | |||
| 1724 | emu_thread->SetRunning(false); | 1727 | emu_thread->SetRunning(false); |
| 1725 | 1728 | ||
| 1726 | ui.action_Start->setEnabled(true); | 1729 | ui.action_Start->setEnabled(true); |
| @@ -1803,7 +1806,7 @@ void GMainWindow::ToggleWindowMode() { | |||
| 1803 | // Render in the main window... | 1806 | // Render in the main window... |
| 1804 | render_window->BackupGeometry(); | 1807 | render_window->BackupGeometry(); |
| 1805 | ui.horizontalLayout->addWidget(render_window); | 1808 | ui.horizontalLayout->addWidget(render_window); |
| 1806 | render_window->setFocusPolicy(Qt::ClickFocus); | 1809 | render_window->setFocusPolicy(Qt::StrongFocus); |
| 1807 | if (emulation_running) { | 1810 | if (emulation_running) { |
| 1808 | render_window->setVisible(true); | 1811 | render_window->setVisible(true); |
| 1809 | render_window->setFocus(); | 1812 | render_window->setFocus(); |
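On the Qt side, the focus-policy switch changes how the render widget can regain keyboard focus: Qt::ClickFocus accepts focus only on mouse click, while Qt::StrongFocus also accepts it through Tab navigation. A small sketch of the call in isolation, using a generic QWidget rather than yuzu's render window class:

    #include <QWidget>

    void ConfigureRenderFocus(QWidget& render_window) {
        // StrongFocus: the widget can be focused by clicking it or by keyboard
        // navigation, unlike ClickFocus, which is click-only.
        render_window.setFocusPolicy(Qt::StrongFocus);
        render_window.setFocus();
    }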