Compare commits

docsupdate...xbzk-mci-b (4 Commits)
| Author | SHA1 | Date |
|---|---|---|
| | ce4877b424 | |
| | 6f4ade37e1 | |
| | 09f06a9a41 | |
| | a701ea274f | |

```diff
@@ -1508,7 +1508,10 @@ void BufferCache<P>::MappedUploadMemory([[maybe_unused]] Buffer& buffer,
                                         [[maybe_unused]] u64 total_size_bytes,
                                         [[maybe_unused]] std::span<BufferCopy> copies) {
     if constexpr (USE_MEMORY_MAPS) {
-        auto upload_staging = runtime.UploadStagingBuffer(total_size_bytes);
+        constexpr u64 MAX_STAGING_SIZE = 2_GiB;
+        auto upload_staging = runtime.UploadStagingBuffer((std::min)(total_size_bytes, MAX_STAGING_SIZE));
+        if (upload_staging.mapped_span.size() < total_size_bytes) return;
+        //auto upload_staging = runtime.UploadStagingBuffer(total_size_bytes);
         const std::span<u8> staging_pointer = upload_staging.mapped_span;
         for (BufferCopy& copy : copies) {
             u8* const src_pointer = staging_pointer.data() + copy.src_offset;
```
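
The hunk above caps the staging request and returns early when the mapped span cannot hold the whole upload. Below is a minimal standalone sketch of that clamp-and-bail pattern; `StagingBuffer`, `UploadStagingSketch`, and the 1 MiB pool are hypothetical stand-ins for illustration, not the buffer cache's real types.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <span>
#include <vector>

// Hypothetical stand-in for the staging allocator: a fixed pool whose mapping
// may be smaller than what the caller asked for.
struct StagingBuffer {
    std::span<std::uint8_t> mapped_span;
};

static std::vector<std::uint8_t> g_pool(1u << 20); // 1 MiB pretend pool

static StagingBuffer UploadStagingSketch(std::uint64_t size) {
    const std::uint64_t granted = std::min<std::uint64_t>(size, g_pool.size());
    return StagingBuffer{std::span<std::uint8_t>(g_pool.data(), static_cast<std::size_t>(granted))};
}

// Mirrors the hunk's pattern: clamp the request to a hard cap, then bail out if
// the mapping that came back cannot hold the whole upload.
static bool MappedUploadSketch(std::uint64_t total_size_bytes) {
    constexpr std::uint64_t MAX_STAGING_SIZE = 2ULL << 30; // 2 GiB cap, as in the hunk
    StagingBuffer upload_staging = UploadStagingSketch(std::min(total_size_bytes, MAX_STAGING_SIZE));
    if (upload_staging.mapped_span.size() < total_size_bytes) {
        return false; // skip instead of writing past the mapping
    }
    // ... copy the upload data into upload_staging.mapped_span here ...
    return true;
}

int main() {
    std::printf("64 KiB upload fits: %d\n", MappedUploadSketch(64 * 1024));
    std::printf("3 GiB upload fits:  %d\n", MappedUploadSketch(3ULL << 30));
}
```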

```diff
@@ -92,19 +92,25 @@ void MaxwellDMA::Launch() {
             }
         }
     } else {
-        // TODO: allow multisized components.
+        // TODO: xbzk: multisized components support.
+        // validadte this widely!
+        // shipped in PR 3164.
         auto& accelerate = rasterizer->AccessAccelerateDMA();
         const bool is_const_a_dst = regs.remap_const.dst_x == RemapConst::Swizzle::CONST_A;
         if (regs.launch_dma.remap_enable != 0 && is_const_a_dst) {
-            ASSERT(regs.remap_const.component_size_minus_one == 3);
+            const u32 remap_components_size = regs.remap_const.component_size_minus_one + 1;
             accelerate.BufferClear(regs.offset_out, regs.line_length_in,
                                    regs.remap_const.remap_consta_value);
-            read_buffer.resize_destructive(regs.line_length_in * sizeof(u32));
-            std::span<u32> span(reinterpret_cast<u32*>(read_buffer.data()), regs.line_length_in);
-            std::ranges::fill(span, regs.remap_const.remap_consta_value);
+            read_buffer.resize_destructive(regs.line_length_in * remap_components_size);
+            for (u32 i = 0; i < regs.line_length_in; ++i) {
+                for (u32 j = 0; j < remap_components_size; ++j) {
+                    read_buffer[i * remap_components_size + j] =
+                        (regs.remap_const.remap_consta_value >> (j * 8)) & 0xFF;
+                }
+            }
             memory_manager.WriteBlockUnsafe(regs.offset_out,
-                                            reinterpret_cast<u8*>(read_buffer.data()),
-                                            regs.line_length_in * sizeof(u32));
+                                            read_buffer.data(),
+                                            regs.line_length_in * remap_components_size);
         } else {
             memory_manager.FlushCaching();
             const auto convert_linear_2_blocklinear_addr = [](u64 address) {
```
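
The hunk above replaces the fixed 4-byte (`sizeof(u32)`) remap-constant fill with a byte-wise expansion driven by `component_size_minus_one`. Below is a minimal sketch of that byte-splat; `fill_remap_constant` is a hypothetical helper standing in for the engine code.

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Expand a packed constant into component_size bytes per element, the same
// little-endian byte-splat the hunk performs into read_buffer.
static std::vector<std::uint8_t> fill_remap_constant(std::uint32_t line_length,
                                                     std::uint32_t component_size,
                                                     std::uint32_t const_value) {
    std::vector<std::uint8_t> buffer(static_cast<std::size_t>(line_length) * component_size);
    for (std::uint32_t i = 0; i < line_length; ++i) {
        for (std::uint32_t j = 0; j < component_size; ++j) {
            // Byte j of the constant, repeated for every element in the line.
            buffer[i * component_size + j] =
                static_cast<std::uint8_t>((const_value >> (j * 8)) & 0xFF);
        }
    }
    return buffer;
}

int main() {
    // component_size_minus_one == 1 -> 2-byte components, no longer asserted to be 4 bytes.
    const auto bytes = fill_remap_constant(4, 2, 0xAABBCCDDu);
    for (std::uint8_t b : bytes) std::printf("%02X ", b);
    std::printf("\n"); // prints: DD CC DD CC DD CC DD CC
}
```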

```diff
@@ -479,6 +479,14 @@ void RasterizerVulkan::Clear(u32 layer_count) {
 }
 
 void RasterizerVulkan::DispatchCompute() {
+#if defined(ANDROID) || defined(__linux__)
+    static u32 dispatch_count = 0;
+    if (dispatch_count < 2) {
+        dispatch_count++;
+        return;
+    }
+#endif
+
     FlushWork();
     gpu_memory->FlushCaching();
 
```
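
The hunk above skips the first two compute dispatches on Android and Linux builds using a function-local static counter. A minimal sketch of that skip-first-N pattern, with `dispatch_compute` as a hypothetical stand-in for the rasterizer method:

```cpp
#include <cstdio>

// Skip the first N calls behind a platform gate, mirroring the DispatchCompute() change.
static void dispatch_compute() {
#if defined(ANDROID) || defined(__linux__)
    static unsigned dispatch_count = 0;
    if (dispatch_count < 2) {
        ++dispatch_count;
        std::puts("skipped");
        return;
    }
#endif
    std::puts("dispatched");
}

int main() {
    for (int i = 0; i < 4; ++i) dispatch_compute();
    // On Android/Linux builds: skipped, skipped, dispatched, dispatched.
}
```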