2022-04-23 10:59:50 +02:00
|
|
|
// SPDX-FileCopyrightText: Copyright 2018 yuzu Emulator Project
|
|
|
|
// SPDX-License-Identifier: GPL-2.0-or-later
|
2018-02-12 03:34:20 +01:00
|
|
|
|
2018-10-26 05:42:39 +02:00
|
|
|
#include <cstring>
|
2019-11-28 06:15:34 +01:00
|
|
|
#include <optional>
|
2018-02-12 18:34:41 +01:00
|
|
|
#include "common/assert.h"
|
2018-03-25 06:35:06 +02:00
|
|
|
#include "core/core.h"
|
2018-09-01 05:25:18 +02:00
|
|
|
#include "core/core_timing.h"
|
2022-03-11 14:47:01 +01:00
|
|
|
#include "video_core/dirty_flags.h"
|
2018-02-12 03:34:20 +01:00
|
|
|
#include "video_core/engines/maxwell_3d.h"
|
2020-02-10 15:32:51 +01:00
|
|
|
#include "video_core/gpu.h"
|
2019-04-06 00:21:15 +02:00
|
|
|
#include "video_core/memory_manager.h"
|
2018-03-24 07:41:16 +01:00
|
|
|
#include "video_core/rasterizer_interface.h"
|
2018-03-20 00:00:29 +01:00
|
|
|
#include "video_core/textures/texture.h"
|
2018-02-12 03:34:20 +01:00
|
|
|
|
2018-10-20 21:58:06 +02:00
|
|
|
namespace Tegra::Engines {
|
2018-02-12 03:34:20 +01:00
|
|
|
|
2019-11-28 06:15:34 +01:00
|
|
|
using VideoCore::QueryType;

/// First register id that is actually a Macro call.
/// Writes at or above this index trigger macro execution instead of plain
/// register writes (see CallMethod / ProcessMacro).
constexpr u32 MacroRegistersStart = 0xE00;
|
|
|
|
|
2020-06-11 05:58:57 +02:00
|
|
|
Maxwell3D::Maxwell3D(Core::System& system_, MemoryManager& memory_manager_)
    : system{system_}, memory_manager{memory_manager_}, macro_engine{GetMacroEngine(*this)},
      upload_state{memory_manager, regs.upload} {
    // Start with every dirty flag raised so the first use of any state is uploaded.
    dirty.flags.flip();
    InitializeRegisterDefaults();
}
|
|
|
|
|
2020-06-11 05:58:57 +02:00
|
|
|
// Out-of-line so members with incomplete types in the header can be destroyed here.
Maxwell3D::~Maxwell3D() = default;
|
|
|
|
|
2021-01-05 08:09:39 +01:00
|
|
|
void Maxwell3D::BindRasterizer(VideoCore::RasterizerInterface* rasterizer_) {
    // Store the rasterizer and also hand it to the inline-upload helper,
    // which issues its own rasterizer calls.
    rasterizer = rasterizer_;
    upload_state.BindRasterizer(rasterizer_);
}
|
|
|
|
|
2018-10-26 05:42:39 +02:00
|
|
|
void Maxwell3D::InitializeRegisterDefaults() {
    // Initializes registers to their default values - what games expect them to be at boot. This is
    // for certain registers that may not be explicitly set by games.

    // Reset all registers to zero
    std::memset(&regs, 0, sizeof(regs));

    // Depth range near/far is not always set, but is expected to be the default 0.0f, 1.0f. This is
    // needed for ARMS.
    for (auto& viewport : regs.viewports) {
        viewport.depth_range_near = 0.0f;
        viewport.depth_range_far = 1.0f;
    }
    // Default viewport swizzles are the identity mapping.
    for (auto& viewport : regs.viewport_transform) {
        viewport.swizzle.x.Assign(Regs::ViewportSwizzle::PositiveX);
        viewport.swizzle.y.Assign(Regs::ViewportSwizzle::PositiveY);
        viewport.swizzle.z.Assign(Regs::ViewportSwizzle::PositiveZ);
        viewport.swizzle.w.Assign(Regs::ViewportSwizzle::PositiveW);
    }

    // Doom and Bomberman seems to use the uninitialized registers and just enable blend
    // so initialize blend registers with sane values
    regs.blend.equation_rgb = Regs::Blend::Equation::Add;
    regs.blend.factor_source_rgb = Regs::Blend::Factor::One;
    regs.blend.factor_dest_rgb = Regs::Blend::Factor::Zero;
    regs.blend.equation_a = Regs::Blend::Equation::Add;
    regs.blend.factor_source_a = Regs::Blend::Factor::One;
    regs.blend.factor_dest_a = Regs::Blend::Factor::Zero;
    for (auto& blend : regs.independent_blend) {
        blend.equation_rgb = Regs::Blend::Equation::Add;
        blend.factor_source_rgb = Regs::Blend::Factor::One;
        blend.factor_dest_rgb = Regs::Blend::Factor::Zero;
        blend.equation_a = Regs::Blend::Equation::Add;
        blend.factor_source_a = Regs::Blend::Factor::One;
        blend.factor_dest_a = Regs::Blend::Factor::Zero;
    }
    // Stencil defaults: keep everything, always pass, all bits in the masks set.
    regs.stencil_front_op_fail = Regs::StencilOp::Keep;
    regs.stencil_front_op_zfail = Regs::StencilOp::Keep;
    regs.stencil_front_op_zpass = Regs::StencilOp::Keep;
    regs.stencil_front_func_func = Regs::ComparisonOp::Always;
    regs.stencil_front_func_mask = 0xFFFFFFFF;
    regs.stencil_front_mask = 0xFFFFFFFF;
    regs.stencil_two_side_enable = 1;
    regs.stencil_back_op_fail = Regs::StencilOp::Keep;
    regs.stencil_back_op_zfail = Regs::StencilOp::Keep;
    regs.stencil_back_op_zpass = Regs::StencilOp::Keep;
    regs.stencil_back_func_func = Regs::ComparisonOp::Always;
    regs.stencil_back_func_mask = 0xFFFFFFFF;
    regs.stencil_back_mask = 0xFFFFFFFF;

    regs.depth_test_func = Regs::ComparisonOp::Always;
    regs.front_face = Regs::FrontFace::CounterClockWise;
    regs.cull_face = Regs::CullFace::Back;

    // TODO(Rodrigo): Most games do not set a point size. I think this is a case of a
    // register carrying a default value. Assume it's OpenGL's default (1).
    regs.point_size = 1.0f;

    // TODO(bunnei): Some games do not initialize the color masks (e.g. Sonic Mania). Assuming a
    // default of enabled fixes rendering here.
    for (auto& color_mask : regs.color_mask) {
        color_mask.R.Assign(1);
        color_mask.G.Assign(1);
        color_mask.B.Assign(1);
        color_mask.A.Assign(1);
    }

    for (auto& format : regs.vertex_attrib_format) {
        format.constant.Assign(1);
    }

    // NVN games expect these values to be enabled at boot
    regs.rasterize_enable = 1;
    regs.rt_separate_frag_data = 1;
    regs.framebuffer_srgb = 1;
    regs.line_width_aliased = 1.0f;
    regs.line_width_smooth = 1.0f;
    // NOTE(review): front_face is assigned twice in this function (CounterClockWise above,
    // ClockWise here); the later write wins. Looks intentional for NVN defaults — confirm.
    regs.front_face = Maxwell3D::Regs::FrontFace::ClockWise;
    regs.polygon_mode_back = Maxwell3D::Regs::PolygonMode::Fill;
    regs.polygon_mode_front = Maxwell3D::Regs::PolygonMode::Fill;

    // Seed the shadow RAM copy with the same defaults.
    shadow_state = regs;

    // Registers written by macros that participate in inline (MME) draw detection.
    mme_inline[MAXWELL3D_REG_INDEX(draw.vertex_end_gl)] = true;
    mme_inline[MAXWELL3D_REG_INDEX(draw.vertex_begin_gl)] = true;
    mme_inline[MAXWELL3D_REG_INDEX(vertex_buffer.count)] = true;
    mme_inline[MAXWELL3D_REG_INDEX(index_array.count)] = true;
}
|
2018-02-12 03:34:20 +01:00
|
|
|
|
2020-07-12 10:03:05 +02:00
|
|
|
void Maxwell3D::ProcessMacro(u32 method, const u32* base_start, u32 amount, bool is_last_call) {
    // Accumulates the arguments of a macro call and fires the macro once the
    // command buffer has no more parameters for it.
    const bool starting_new_macro = executing_macro == 0;
    if (starting_new_macro) {
        // A macro call must begin by writing the macro method's register, not its argument.
        ASSERT_MSG((method % 2) == 0,
                   "Can't start macro execution by writing to the ARGS register");
        executing_macro = method;
    }

    // Append this batch of arguments to the pending parameter list.
    const u32* const batch_end = base_start + amount;
    macro_params.insert(macro_params.end(), base_start, batch_end);

    // Call the macro when there are no more parameters in the command buffer
    if (!is_last_call) {
        return;
    }
    CallMacroMethod(executing_macro, macro_params);
    macro_params.clear();
}
|
|
|
|
|
|
|
|
u32 Maxwell3D::ProcessShadowRam(u32 method, u32 argument) {
    // Apply the current shadow RAM policy to a register write and return the
    // value that should actually be stored.
    switch (shadow_state.shadow_ram_control) {
    case Regs::ShadowRamControl::Track:
    case Regs::ShadowRamControl::TrackWithFilter:
        // Keep track of the register value in shadow_state when requested.
        shadow_state.reg_array[method] = argument;
        return argument;
    case Regs::ShadowRamControl::Replay:
        // Ignore the incoming value and replay the previously tracked one.
        return shadow_state.reg_array[method];
    default:
        return argument;
    }
}
|
|
|
|
|
|
|
|
void Maxwell3D::ProcessDirtyRegisters(u32 method, u32 argument) {
    // Store the written value and raise the dirty flags tied to this register.
    auto& stored_value = regs.reg_array[method];
    if (stored_value == argument) {
        // Redundant write: nothing changed, keep dirty flags untouched.
        return;
    }
    stored_value = argument;

    for (const auto& table : dirty.tables) {
        const auto flag_index = table[method];
        dirty.flags[flag_index] = true;
    }
}
|
|
|
|
|
|
|
|
void Maxwell3D::ProcessMethodCall(u32 method, u32 argument, u32 nonshadow_argument,
                                  bool is_last_call) {
    // Runs the side effects associated with a register write. `argument` has
    // already passed through shadow RAM processing; `nonshadow_argument` is the
    // raw value as written (needed for shadow_ram_control itself).
    switch (method) {
    case MAXWELL3D_REG_INDEX(wait_for_idle):
        return rasterizer->WaitForIdle();
    case MAXWELL3D_REG_INDEX(shadow_ram_control):
        // Must use the raw value: shadow processing would otherwise filter it.
        shadow_state.shadow_ram_control = static_cast<Regs::ShadowRamControl>(nonshadow_argument);
        return;
    case MAXWELL3D_REG_INDEX(macros.upload_address):
        // Selecting a new upload address discards any code previously stored there.
        return macro_engine->ClearCode(regs.macros.upload_address);
    case MAXWELL3D_REG_INDEX(macros.data):
        return macro_engine->AddCode(regs.macros.upload_address, argument);
    case MAXWELL3D_REG_INDEX(macros.bind):
        return ProcessMacroBind(argument);
    case MAXWELL3D_REG_INDEX(firmware[4]):
        return ProcessFirmwareCall4();
    // The 16-word const buffer data window: any write lands in ProcessCBData.
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data):
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 1:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 2:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 3:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 4:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 5:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 6:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 7:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 8:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 9:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 10:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 11:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 12:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 13:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 14:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 15:
        return ProcessCBData(argument);
    case MAXWELL3D_REG_INDEX(cb_bind[0]):
        return ProcessCBBind(0);
    case MAXWELL3D_REG_INDEX(cb_bind[1]):
        return ProcessCBBind(1);
    case MAXWELL3D_REG_INDEX(cb_bind[2]):
        return ProcessCBBind(2);
    case MAXWELL3D_REG_INDEX(cb_bind[3]):
        return ProcessCBBind(3);
    case MAXWELL3D_REG_INDEX(cb_bind[4]):
        return ProcessCBBind(4);
    case MAXWELL3D_REG_INDEX(draw.vertex_end_gl):
        return DrawArrays();
    case MAXWELL3D_REG_INDEX(small_index):
        // A packed first/count draw: expand into the regular index_array registers.
        regs.index_array.count = regs.small_index.count;
        regs.index_array.first = regs.small_index.first;
        dirty.flags[VideoCommon::Dirty::IndexBuffer] = true;
        return DrawArrays();
    case MAXWELL3D_REG_INDEX(small_index_2):
        regs.index_array.count = regs.small_index_2.count;
        regs.index_array.first = regs.small_index_2.first;
        dirty.flags[VideoCommon::Dirty::IndexBuffer] = true;
        // a macro calls this one over and over, should it increase instancing?
        // Used by Hades and likely other Vulkan games.
        return DrawArrays();
    case MAXWELL3D_REG_INDEX(topology_override):
        // Only record that an override was written; it is applied at draw time
        // by ProcessTopologyOverride().
        use_topology_override = true;
        return;
    case MAXWELL3D_REG_INDEX(clear_buffers):
        return ProcessClearBuffers();
    case MAXWELL3D_REG_INDEX(query.query_get):
        return ProcessQueryGet();
    case MAXWELL3D_REG_INDEX(condition.mode):
        return ProcessQueryCondition();
    case MAXWELL3D_REG_INDEX(counter_reset):
        return ProcessCounterReset();
    case MAXWELL3D_REG_INDEX(sync_info):
        return ProcessSyncPoint();
    case MAXWELL3D_REG_INDEX(exec_upload):
        return upload_state.ProcessExec(regs.exec_upload.linear != 0);
    case MAXWELL3D_REG_INDEX(data_upload):
        upload_state.ProcessData(argument, is_last_call);
        return;
    case MAXWELL3D_REG_INDEX(fragment_barrier):
        return rasterizer->FragmentBarrier();
    case MAXWELL3D_REG_INDEX(invalidate_texture_data_cache):
        rasterizer->InvalidateGPUCache();
        return rasterizer->WaitForIdle();
    case MAXWELL3D_REG_INDEX(tiled_cache_barrier):
        return rasterizer->TiledCacheBarrier();
    }
}
|
|
|
|
|
2020-06-03 08:33:38 +02:00
|
|
|
void Maxwell3D::CallMacroMethod(u32 method, const std::vector<u32>& parameters) {
    // Reset the current macro.
    executing_macro = 0;

    // Lookup the macro offset: macro registers come in (method, arg) pairs,
    // hence the shift by one, wrapped to the bind table size.
    const u32 table_size = static_cast<u32>(macro_positions.size());
    const u32 entry = ((method - MacroRegistersStart) >> 1) % table_size;

    // Execute the current macro.
    macro_engine->Execute(macro_positions[entry], parameters);

    // Flush any inline draw the macro may have produced.
    if (mme_draw.current_mode != MMEDrawMode::Undefined) {
        FlushMMEInlineDraw();
    }
}
|
|
|
|
|
2020-04-28 03:47:58 +02:00
|
|
|
void Maxwell3D::CallMethod(u32 method, u32 method_argument, bool is_last_call) {
    // It is an error to write to a register other than the current macro's ARG register before it
    // has finished execution.
    if (executing_macro != 0) {
        ASSERT(method == executing_macro + 1);
    }

    // Methods after 0xE00 are special, they're actually triggers for some microcode that was
    // uploaded to the GPU during initialization.
    if (method >= MacroRegistersStart) {
        ProcessMacro(method, &method_argument, 1, is_last_call);
        return;
    }

    ASSERT_MSG(method < Regs::NUM_REGS,
               "Invalid Maxwell3D register, increase the size of the Regs structure");

    // Order matters: resolve shadow RAM first, record the write (raising dirty
    // flags), then run any side effects tied to this register.
    const u32 argument = ProcessShadowRam(method, method_argument);
    ProcessDirtyRegisters(method, argument);
    ProcessMethodCall(method, argument, method_argument, is_last_call);
}
|
|
|
|
|
2020-04-20 18:27:57 +02:00
|
|
|
void Maxwell3D::CallMultiMethod(u32 method, const u32* base_start, u32 amount,
                                u32 methods_pending) {
    // Batched form of CallMethod: `amount` consecutive arguments for one method.
    // Methods after 0xE00 are special, they're actually triggers for some microcode that was
    // uploaded to the GPU during initialization.
    if (method >= MacroRegistersStart) {
        // The macro call is complete when this batch covers everything pending.
        ProcessMacro(method, base_start, amount, amount == methods_pending);
        return;
    }
    switch (method) {
    // Fast path: bulk const-buffer data writes go straight to ProcessCBMultiData.
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data):
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 1:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 2:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 3:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 4:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 5:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 6:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 7:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 8:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 9:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 10:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 11:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 12:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 13:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 14:
    case MAXWELL3D_REG_INDEX(const_buffer.cb_data) + 15:
        ProcessCBMultiData(base_start, amount);
        break;
    case MAXWELL3D_REG_INDEX(data_upload):
        // Fast path: bulk inline memory upload.
        upload_state.ProcessData(base_start, static_cast<size_t>(amount));
        return;
    default:
        // Generic fallback: replay each argument through the scalar path,
        // marking the final one so per-call side effects fire correctly.
        for (std::size_t i = 0; i < amount; i++) {
            CallMethod(method, base_start[i], methods_pending - static_cast<u32>(i) <= 1);
        }
        break;
    }
}
|
|
|
|
|
2019-09-22 13:23:13 +02:00
|
|
|
void Maxwell3D::StepInstance(const MMEDrawMode expected_mode, const u32 count) {
|
|
|
|
if (mme_draw.current_mode == MMEDrawMode::Undefined) {
|
|
|
|
if (mme_draw.gl_begin_consume) {
|
|
|
|
mme_draw.current_mode = expected_mode;
|
|
|
|
mme_draw.current_count = count;
|
|
|
|
mme_draw.instance_count = 1;
|
|
|
|
mme_draw.gl_begin_consume = false;
|
|
|
|
mme_draw.gl_end_count = 0;
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
} else {
|
|
|
|
if (mme_draw.current_mode == expected_mode && count == mme_draw.current_count &&
|
|
|
|
mme_draw.instance_mode && mme_draw.gl_begin_consume) {
|
|
|
|
mme_draw.instance_count++;
|
|
|
|
mme_draw.gl_begin_consume = false;
|
|
|
|
return;
|
|
|
|
} else {
|
|
|
|
FlushMMEInlineDraw();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Tail call in case it needs to retry.
|
|
|
|
StepInstance(expected_mode, count);
|
|
|
|
}
|
|
|
|
|
2020-04-28 03:47:58 +02:00
|
|
|
void Maxwell3D::CallMethodFromMME(u32 method, u32 method_argument) {
    // Register writes issued from inside a macro. Draw-related registers are
    // intercepted to build inline draws instead of going through CallMethod.
    if (mme_inline[method]) {
        regs.reg_array[method] = method_argument;
        if (method == MAXWELL3D_REG_INDEX(vertex_buffer.count) ||
            method == MAXWELL3D_REG_INDEX(index_array.count)) {
            // A count write advances the instanced-draw state machine.
            const MMEDrawMode expected_mode = method == MAXWELL3D_REG_INDEX(vertex_buffer.count)
                                                  ? MMEDrawMode::Array
                                                  : MMEDrawMode::Indexed;
            StepInstance(expected_mode, method_argument);
        } else if (method == MAXWELL3D_REG_INDEX(draw.vertex_begin_gl)) {
            // Draw begin: remember whether this draw is instanced and mark the
            // begin as pending consumption by StepInstance.
            mme_draw.instance_mode =
                (regs.draw.instance_next != 0) || (regs.draw.instance_cont != 0);
            mme_draw.gl_begin_consume = true;
        } else {
            // draw.vertex_end_gl: count ends to pair against instances.
            mme_draw.gl_end_count++;
        }
    } else {
        // Any other register flushes a pending inline draw first, then is
        // processed through the normal path.
        if (mme_draw.current_mode != MMEDrawMode::Undefined) {
            FlushMMEInlineDraw();
        }
        CallMethod(method, method_argument, true);
    }
}
|
|
|
|
|
2022-03-11 01:21:04 +01:00
|
|
|
void Maxwell3D::ProcessTopologyOverride() {
|
2022-03-14 15:11:28 +01:00
|
|
|
using PrimitiveTopology = Maxwell3D::Regs::PrimitiveTopology;
|
|
|
|
using PrimitiveTopologyOverride = Maxwell3D::Regs::PrimitiveTopologyOverride;
|
|
|
|
|
|
|
|
PrimitiveTopology topology{};
|
|
|
|
|
|
|
|
switch (regs.topology_override) {
|
|
|
|
case PrimitiveTopologyOverride::None:
|
2022-03-14 15:37:51 +01:00
|
|
|
topology = regs.draw.topology;
|
|
|
|
break;
|
2022-03-14 15:11:28 +01:00
|
|
|
case PrimitiveTopologyOverride::Points:
|
|
|
|
topology = PrimitiveTopology::Points;
|
|
|
|
break;
|
|
|
|
case PrimitiveTopologyOverride::Lines:
|
|
|
|
topology = PrimitiveTopology::Lines;
|
|
|
|
break;
|
|
|
|
case PrimitiveTopologyOverride::LineStrip:
|
|
|
|
topology = PrimitiveTopology::LineStrip;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
topology = static_cast<PrimitiveTopology>(regs.topology_override);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2022-03-11 23:16:56 +01:00
|
|
|
if (use_topology_override) {
|
2022-03-14 15:11:28 +01:00
|
|
|
regs.draw.topology.Assign(topology);
|
2022-03-11 01:21:04 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-15 17:48:54 +02:00
|
|
|
void Maxwell3D::FlushMMEInlineDraw() {
    // Submits the inline draw accumulated by the MME state machine and resets
    // the tracking state for the next draw.
    LOG_TRACE(HW_GPU, "called, topology={}, count={}", regs.draw.topology.Value(),
              regs.vertex_buffer.count);
    ASSERT_MSG(!(regs.index_array.count && regs.vertex_buffer.count), "Both indexed and direct?");
    // Every consumed begin must have been paired with an end.
    ASSERT(mme_draw.instance_count == mme_draw.gl_end_count);

    // Both instance configuration registers can not be set at the same time.
    ASSERT_MSG(!regs.draw.instance_next || !regs.draw.instance_cont,
               "Illegal combination of instancing parameters");

    ProcessTopologyOverride();

    const bool is_indexed = mme_draw.current_mode == MMEDrawMode::Indexed;
    if (ShouldExecute()) {
        // `true` marks this as an instanced (inline) draw to the rasterizer.
        rasterizer->Draw(is_indexed, true);
    }

    // TODO(bunnei): Below, we reset vertex count so that we can use these registers to determine if
    // the game is trying to draw indexed or direct mode. This needs to be verified on HW still -
    // it's possible that it is incorrect and that there is some other register used to specify the
    // drawing mode.
    if (is_indexed) {
        regs.index_array.count = 0;
    } else {
        regs.vertex_buffer.count = 0;
    }
    // Return the MME draw state machine to its idle state.
    mme_draw.current_mode = MMEDrawMode::Undefined;
    mme_draw.current_count = 0;
    mme_draw.instance_count = 0;
    mme_draw.instance_mode = false;
    mme_draw.gl_begin_consume = false;
    mme_draw.gl_end_count = 0;
}
|
|
|
|
|
2018-04-24 03:01:29 +02:00
|
|
|
void Maxwell3D::ProcessMacroUpload(u32 data) {
    // Store one macro instruction word at the current upload address, then
    // advance the address for the next word.
    macro_engine->AddCode(regs.macros.upload_address++, data);
}
|
|
|
|
|
|
|
|
void Maxwell3D::ProcessMacroBind(u32 data) {
    // Record the code position for the next macro entry slot, then advance the
    // entry index for the next bind.
    macro_positions[regs.macros.entry++] = data;
}
|
|
|
|
|
2019-08-31 22:43:19 +02:00
|
|
|
void Maxwell3D::ProcessFirmwareCall4() {
    LOG_WARNING(HW_GPU, "(STUBBED) called");

    // Firmware call 4 is a blob that changes some registers depending on its parameters.
    // These registers don't affect emulation and so are stubbed by setting 0xd00 to 1.
    regs.reg_array[0xd00] = 1;
}
|
|
|
|
|
2020-01-28 03:48:15 +01:00
|
|
|
void Maxwell3D::StampQueryResult(u64 payload, bool long_query) {
    // Writes a query result to the address selected by the query registers.
    const GPUVAddr sequence_address{regs.query.QueryAddress()};
    if (!long_query) {
        // Short mode only stores the low 32 bits of the payload.
        memory_manager.Write<u32>(sequence_address, static_cast<u32>(payload));
        return;
    }
    // Write the 128-bit result structure in long mode. Note: We emulate an infinitely fast
    // GPU, this command may actually take a while to complete in real hardware due to GPU
    // wait queues.
    struct LongQueryResult {
        u64_le value;
        u64_le timestamp;
    };
    static_assert(sizeof(LongQueryResult) == 16, "LongQueryResult has wrong size");
    LongQueryResult query_result{payload, system.GPU().GetTicks()};
    memory_manager.WriteBlock(sequence_address, &query_result, sizeof(query_result));
}
|
2018-02-12 18:34:41 +01:00
|
|
|
|
2020-01-28 03:48:15 +01:00
|
|
|
void Maxwell3D::ProcessQueryGet() {
    // Handles a write to the query_get trigger register.
    // TODO(Subv): Support the other query units.
    if (regs.query.query_get.unit != Regs::QueryUnit::Crop) {
        LOG_DEBUG(HW_GPU, "Units other than CROP are unimplemented");
    }

    switch (regs.query.query_get.operation) {
    case Regs::QueryOperation::Release:
        if (regs.query.query_get.fence == 1 || regs.query.query_get.short_query != 0) {
            // Short release: defer a 32-bit write of the sequence value until
            // the rasterizer signals the fence.
            const GPUVAddr sequence_address{regs.query.QueryAddress()};
            const u32 payload = regs.query.query_sequence;
            std::function<void()> operation([this, sequence_address, payload] {
                memory_manager.Write<u32>(sequence_address, payload);
            });
            rasterizer->SignalFence(std::move(operation));
        } else {
            // Long release: defer a 128-bit {value, timestamp} write instead.
            struct LongQueryResult {
                u64_le value;
                u64_le timestamp;
            };
            const GPUVAddr sequence_address{regs.query.QueryAddress()};
            const u32 payload = regs.query.query_sequence;
            std::function<void()> operation([this, sequence_address, payload] {
                // Timestamp is sampled when the fence fires, not at enqueue time.
                LongQueryResult query_result{payload, system.GPU().GetTicks()};
                memory_manager.WriteBlock(sequence_address, &query_result, sizeof(query_result));
            });
            rasterizer->SignalFence(std::move(operation));
        }
        break;
    case Regs::QueryOperation::Acquire:
        // TODO(Blinkhawk): Under this operation, the GPU waits for the CPU to write a value that
        // matches the current payload.
        UNIMPLEMENTED_MSG("Unimplemented query operation ACQUIRE");
        break;
    case Regs::QueryOperation::Counter:
        if (const std::optional<u64> result = GetQueryResult()) {
            // If the query returns an empty optional it means it's cached and deferred.
            // In this case we have a non-empty result, so we stamp it immediately.
            StampQueryResult(*result, regs.query.query_get.short_query == 0);
        }
        break;
    case Regs::QueryOperation::Trap:
        UNIMPLEMENTED_MSG("Unimplemented query operation TRAP");
        break;
    default:
        UNIMPLEMENTED_MSG("Unknown query operation");
        break;
    }
}
|
2018-03-05 01:13:15 +01:00
|
|
|
|
2019-07-01 04:21:28 +02:00
|
|
|
void Maxwell3D::ProcessQueryCondition() {
|
|
|
|
const GPUVAddr condition_address{regs.condition.Address()};
|
|
|
|
switch (regs.condition.mode) {
|
|
|
|
case Regs::ConditionMode::Always: {
|
|
|
|
execute_on = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case Regs::ConditionMode::Never: {
|
|
|
|
execute_on = false;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case Regs::ConditionMode::ResNonZero: {
|
|
|
|
Regs::QueryCompare cmp;
|
2019-11-26 22:52:15 +01:00
|
|
|
memory_manager.ReadBlock(condition_address, &cmp, sizeof(cmp));
|
2019-07-01 04:21:28 +02:00
|
|
|
execute_on = cmp.initial_sequence != 0U && cmp.initial_mode != 0U;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case Regs::ConditionMode::Equal: {
|
|
|
|
Regs::QueryCompare cmp;
|
2019-11-26 22:52:15 +01:00
|
|
|
memory_manager.ReadBlock(condition_address, &cmp, sizeof(cmp));
|
2019-07-01 04:21:28 +02:00
|
|
|
execute_on =
|
|
|
|
cmp.initial_sequence == cmp.current_sequence && cmp.initial_mode == cmp.current_mode;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case Regs::ConditionMode::NotEqual: {
|
|
|
|
Regs::QueryCompare cmp;
|
2019-11-26 22:52:15 +01:00
|
|
|
memory_manager.ReadBlock(condition_address, &cmp, sizeof(cmp));
|
2019-07-01 04:21:28 +02:00
|
|
|
execute_on =
|
|
|
|
cmp.initial_sequence != cmp.current_sequence || cmp.initial_mode != cmp.current_mode;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
default: {
|
|
|
|
UNIMPLEMENTED_MSG("Uninplemented Condition Mode!");
|
|
|
|
execute_on = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-07-28 00:40:10 +02:00
|
|
|
void Maxwell3D::ProcessCounterReset() {
|
|
|
|
switch (regs.counter_reset) {
|
|
|
|
case Regs::CounterReset::SampleCnt:
|
2020-06-11 05:58:57 +02:00
|
|
|
rasterizer->ResetCounter(QueryType::SamplesPassed);
|
2019-07-28 00:40:10 +02:00
|
|
|
break;
|
|
|
|
default:
|
2020-12-07 06:41:47 +01:00
|
|
|
LOG_DEBUG(Render_OpenGL, "Unimplemented counter reset={}", regs.counter_reset);
|
2019-07-28 00:40:10 +02:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-02 17:46:00 +02:00
|
|
|
void Maxwell3D::ProcessSyncPoint() {
    // Signals the rasterizer when the sync_info register requests an increment.
    const u32 sync_point = regs.sync_info.sync_point.Value();
    const u32 increment = regs.sync_info.increment.Value();
    // Read but unused; semantics of this field are not handled here.
    [[maybe_unused]] const u32 cache_flush = regs.sync_info.unknown.Value();
    if (increment) {
        rasterizer->SignalSyncPoint(sync_point);
    }
}
|
|
|
|
|
2018-03-05 01:13:15 +01:00
|
|
|
void Maxwell3D::DrawArrays() {
    // Submits a non-inline draw using the current register state.
    LOG_TRACE(HW_GPU, "called, topology={}, count={}", regs.draw.topology.Value(),
              regs.vertex_buffer.count);
    ASSERT_MSG(!(regs.index_array.count && regs.vertex_buffer.count), "Both indexed and direct?");

    // Both instance configuration registers can not be set at the same time.
    ASSERT_MSG(!regs.draw.instance_next || !regs.draw.instance_cont,
               "Illegal combination of instancing parameters");

    ProcessTopologyOverride();

    if (regs.draw.instance_next) {
        // Increment the current instance *before* drawing.
        state.current_instance += 1;
    } else if (!regs.draw.instance_cont) {
        // Reset the current instance to 0.
        state.current_instance = 0;
    }

    // Index count present (and no direct vertex count) selects an indexed draw.
    const bool is_indexed{regs.index_array.count && !regs.vertex_buffer.count};
    if (ShouldExecute()) {
        // `false`: this is not an inline/instanced MME draw.
        rasterizer->Draw(is_indexed, false);
    }

    // TODO(bunnei): Below, we reset vertex count so that we can use these registers to determine if
    // the game is trying to draw indexed or direct mode. This needs to be verified on HW still -
    // it's possible that it is incorrect and that there is some other register used to specify the
    // drawing mode.
    if (is_indexed) {
        regs.index_array.count = 0;
    } else {
        regs.vertex_buffer.count = 0;
    }
}
|
|
|
|
|
2019-11-26 22:52:15 +01:00
|
|
|
std::optional<u64> Maxwell3D::GetQueryResult() {
    // Returns the query value for the selected counter, or std::nullopt when
    // the result is deferred to the rasterizer (stamped asynchronously).
    switch (regs.query.query_get.select) {
    case Regs::QuerySelect::Payload:
        return regs.query.query_sequence;
    case Regs::QuerySelect::SamplesPassed:
        // Deferred.
        rasterizer->Query(regs.query.QueryAddress(), QueryType::SamplesPassed,
                          system.GPU().GetTicks());
        return std::nullopt;
    default:
        LOG_DEBUG(HW_GPU, "Unimplemented query select type {}",
                  regs.query.query_get.select.Value());
        // Placeholder result for unimplemented selects.
        return 1;
    }
}
|
|
|
|
|
2021-01-17 00:48:58 +01:00
|
|
|
void Maxwell3D::ProcessCBBind(size_t stage_index) {
    // Bind the buffer currently in CB_ADDRESS to the specified index in the desired shader stage.
    const auto& bind_data = regs.cb_bind[stage_index];
    // Evaluate the binding parameters exactly once; the original re-read
    // valid/address/size for both the state mirror and the rasterizer call.
    const bool is_enabled = bind_data.valid.Value() != 0;
    const GPUVAddr gpu_addr = regs.const_buffer.BufferAddress();
    const u32 size = regs.const_buffer.cb_size;

    // Mirror the binding into the tracked per-stage const buffer state.
    auto& buffer = state.shader_stages[stage_index].const_buffers[bind_data.index];
    buffer.enabled = is_enabled;
    buffer.address = gpu_addr;
    buffer.size = size;

    if (!is_enabled) {
        rasterizer->DisableGraphicsUniformBuffer(stage_index, bind_data.index);
        return;
    }
    rasterizer->BindGraphicsUniformBuffer(stage_index, bind_data.index, gpu_addr, size);
}
|
2018-03-17 02:32:44 +01:00
|
|
|
|
2022-03-15 00:11:41 +01:00
|
|
|
void Maxwell3D::ProcessCBMultiData(const u32* start_base, u32 amount) {
|
2018-03-18 21:19:47 +01:00
|
|
|
// Write the input value to the current const buffer at the current position.
|
2019-01-22 07:47:56 +01:00
|
|
|
const GPUVAddr buffer_address = regs.const_buffer.BufferAddress();
|
2018-03-18 21:19:47 +01:00
|
|
|
ASSERT(buffer_address != 0);
|
|
|
|
|
|
|
|
// Don't allow writing past the end of the buffer.
|
2019-07-12 15:25:47 +02:00
|
|
|
ASSERT(regs.const_buffer.cb_pos <= regs.const_buffer.cb_size);
|
2018-03-18 21:19:47 +01:00
|
|
|
|
2022-03-15 00:11:41 +01:00
|
|
|
const GPUVAddr address{buffer_address + regs.const_buffer.cb_pos};
|
|
|
|
const size_t copy_size = amount * sizeof(u32);
|
|
|
|
memory_manager.WriteBlock(address, start_base, copy_size);
|
2018-03-18 21:19:47 +01:00
|
|
|
|
2022-03-15 00:11:41 +01:00
|
|
|
// Increment the current buffer position.
|
|
|
|
regs.const_buffer.cb_pos += static_cast<u32>(copy_size);
|
|
|
|
}
|
2019-02-19 02:58:32 +01:00
|
|
|
|
2022-03-15 00:11:41 +01:00
|
|
|
void Maxwell3D::ProcessCBData(u32 value) {
    // Uploads a single 32-bit word to the current const buffer position by
    // delegating to the bulk upload path.
    ProcessCBMultiData(&value, 1);
}
|
|
|
|
|
2018-03-26 22:46:49 +02:00
|
|
|
Texture::TICEntry Maxwell3D::GetTICEntry(u32 tic_index) const {
    // Compute the GPU address of the requested descriptor inside the TIC pool.
    const GPUVAddr descriptor_addr{regs.tic.Address() + tic_index * sizeof(Texture::TICEntry)};

    // Read the descriptor out of guest memory without cache invalidation.
    Texture::TICEntry entry;
    memory_manager.ReadBlockUnsafe(descriptor_addr, &entry, sizeof(entry));
    return entry;
}
|
|
|
|
|
|
|
|
Texture::TSCEntry Maxwell3D::GetTSCEntry(u32 tsc_index) const {
    // Compute the GPU address of the requested sampler descriptor in the TSC pool.
    const GPUVAddr descriptor_addr{regs.tsc.Address() + tsc_index * sizeof(Texture::TSCEntry)};

    // Read the descriptor out of guest memory without cache invalidation.
    Texture::TSCEntry entry;
    memory_manager.ReadBlockUnsafe(descriptor_addr, &entry, sizeof(entry));
    return entry;
}
|
|
|
|
|
2018-03-28 22:14:47 +02:00
|
|
|
u32 Maxwell3D::GetRegisterValue(u32 method) const {
    // Returns the raw value currently stored in the given Maxwell3D register.
    // The index is validated against the register file size before access.
    ASSERT_MSG(method < Regs::NUM_REGS, "Invalid Maxwell3D register");
    return regs.reg_array[method];
}
|
|
|
|
|
2018-06-07 06:54:25 +02:00
|
|
|
void Maxwell3D::ProcessClearBuffers() {
    // Delegates the clear operation to the rasterizer backend.
    rasterizer->Clear();
}
|
|
|
|
|
2018-10-20 21:58:06 +02:00
|
|
|
} // namespace Tegra::Engines
|