Line data Source code
1 : /**
2 : Copyright (c) 2021-2022 Roman Katuntsev <sbkarr@stappler.org>
3 : Copyright (c) 2023 Stappler LLC <admin@stappler.dev>
4 :
5 : Permission is hereby granted, free of charge, to any person obtaining a copy
6 : of this software and associated documentation files (the "Software"), to deal
7 : in the Software without restriction, including without limitation the rights
8 : to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 : copies of the Software, and to permit persons to whom the Software is
10 : furnished to do so, subject to the following conditions:
11 :
12 : The above copyright notice and this permission notice shall be included in
13 : all copies or substantial portions of the Software.
14 :
15 : THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 : IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 : FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 : AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 : LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 : OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 : THE SOFTWARE.
22 : **/
23 :
24 : #include "XLVkTransferQueue.h"
25 : #include "XLVkDevice.h"
26 : #include "XLVkObject.h"
27 : #include "XLCoreFrameEmitter.h"
28 : #include "XLCoreFrameQueue.h"
29 : #include "XLCoreFrameRequest.h"
30 :
31 : namespace STAPPLER_VERSIONIZED stappler::xenolith::vk {
32 :
// Queue attachment that carries a TransferResource as per-frame input
// for the transfer queue.
class TransferAttachment : public core::GenericAttachment {
public:
	virtual ~TransferAttachment();

	// Creates the per-frame handle that will receive the TransferResource input.
	virtual Rc<AttachmentHandle> makeFrameHandle(const FrameQueue &) override;
};
39 :
// Per-frame handle for TransferAttachment: stores the TransferResource
// submitted as frame input so the pass handle can record the transfer.
class TransferAttachmentHandle : public core::AttachmentHandle {
public:
	virtual ~TransferAttachmentHandle();

	virtual bool setup(FrameQueue &, Function<void(bool)> &&) override;
	// Accepts the TransferResource input for the current frame.
	virtual void submitInput(FrameQueue &, Rc<core::AttachmentInputData> &&, Function<void(bool)> &&) override;

	// Resource scheduled for transfer within this frame.
	const Rc<TransferResource> &getResource() const { return _resource; }

protected:
	Rc<TransferResource> _resource;
};
52 :
// Queue pass that performs the actual transfer; bound to a single
// TransferAttachment provided at init time.
class TransferPass : public QueuePass {
public:
	virtual ~TransferPass();

	virtual bool init(QueuePassBuilder &, const AttachmentData *);

	virtual Rc<QueuePassHandle> makeFrameHandle(const FrameQueue &) override;

	// Attachment this pass reads its TransferResource from.
	const AttachmentData *getAttachment() const { return _attachment; }

protected:
	using QueuePass::init;

	// Not owned; attachment data is owned by the queue.
	const AttachmentData *_attachment = nullptr;
};
68 :
// Per-frame handle for TransferPass: records the transfer command buffers
// and reports completion.
class TransferRenderPassHandle : public QueuePassHandle {
public:
	virtual ~TransferRenderPassHandle();

protected:
	virtual Vector<const CommandBuffer *> doPrepareCommands(FrameHandle &) override;
	virtual void doComplete(FrameQueue &, Function<void(bool)> &&, bool) override;
};
77 :
78 :
79 84 : TransferQueue::~TransferQueue() { }
80 :
81 42 : bool TransferQueue::init() {
82 : using namespace core;
83 42 : Queue::Builder builder("Transfer");
84 :
85 84 : auto attachment = builder.addAttachemnt("TransferAttachment", [&] (AttachmentBuilder &attachmentBuilder) -> Rc<Attachment> {
86 42 : attachmentBuilder.defineAsInput();
87 42 : attachmentBuilder.defineAsOutput();
88 84 : return Rc<TransferAttachment>::create(attachmentBuilder);
89 42 : });
90 :
91 42 : builder.addPass("TransferRenderPass", PassType::Transfer, RenderOrdering(0), [&] (QueuePassBuilder &passBuilder) -> Rc<core::QueuePass> {
92 84 : return Rc<TransferPass>::create(passBuilder, attachment);
93 : });
94 :
95 42 : if (core::Queue::init(move(builder))) {
96 42 : _attachment = attachment;
97 42 : return true;
98 : }
99 0 : return false;
100 42 : }
101 :
102 21 : auto TransferQueue::makeRequest(Rc<TransferResource> &&req)-> Rc<FrameRequest> {
103 21 : auto ret = Rc<FrameRequest>::create(this);
104 21 : ret->addInput(_attachment, move(req));
105 21 : return ret;
106 0 : }
107 :
108 399 : TransferResource::BufferAllocInfo::BufferAllocInfo(core::BufferData *d) {
109 399 : data = d;
110 399 : info.flags = VkBufferCreateFlags(d->flags);
111 399 : info.size = d->size;
112 399 : info.usage = VkBufferUsageFlags(d->usage) | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
113 399 : info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
114 399 : }
115 :
116 63 : TransferResource::ImageAllocInfo::ImageAllocInfo(core::ImageData *d) {
117 63 : data = d;
118 63 : info.flags = data->flags;
119 63 : info.imageType = VkImageType(data->imageType);
120 63 : info.format = VkFormat(data->format);
121 63 : info.extent = VkExtent3D({ data->extent.width, data->extent.height, data->extent.depth });
122 63 : info.mipLevels = data->mipLevels.get();
123 63 : info.arrayLayers = data->arrayLayers.get();
124 63 : info.samples = VkSampleCountFlagBits(data->samples);
125 63 : info.tiling = VkImageTiling(data->tiling);
126 63 : info.usage = VkImageUsageFlags(data->usage) | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
127 63 : info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
128 63 : if (data->tiling == core::ImageTiling::Optimal) {
129 63 : info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
130 : } else {
131 0 : info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
132 : }
133 63 : }
134 :
135 126 : TransferResource::~TransferResource() {
136 63 : if (_alloc) {
137 21 : invalidate(*_alloc->getDevice());
138 : }
139 126 : }
140 :
// Destroys every Vulkan object still owned by this transfer: per-resource
// buffers/images, their dedicated allocations, the shared memory block and
// the staging buffer. Safe to call on partially-initialized state (all
// handles are null-checked). Fires the completion callback with `false`
// if it has not been invoked yet, then detaches from the allocator.
void TransferResource::invalidate(Device &dev) {
	for (auto &it : _buffers) {
		if (it.buffer != VK_NULL_HANDLE) {
			dev.getTable()->vkDestroyBuffer(dev.getDevice(), it.buffer, nullptr);
			it.buffer = VK_NULL_HANDLE;
		}
		if (it.dedicated != VK_NULL_HANDLE) {
			dev.getTable()->vkFreeMemory(dev.getDevice(), it.dedicated, nullptr);
			it.dedicated = VK_NULL_HANDLE;
		}
	}
	for (auto &it : _images) {
		if (it.image != VK_NULL_HANDLE) {
			dev.getTable()->vkDestroyImage(dev.getDevice(), it.image, nullptr);
			it.image = VK_NULL_HANDLE;
		}
		if (it.dedicated != VK_NULL_HANDLE) {
			dev.getTable()->vkFreeMemory(dev.getDevice(), it.dedicated, nullptr);
			it.dedicated = VK_NULL_HANDLE;
		}
	}
	// shared (non-dedicated) memory block
	if (_memory != VK_NULL_HANDLE) {
		dev.getTable()->vkFreeMemory(dev.getDevice(), _memory, nullptr);
		_memory = VK_NULL_HANDLE;
	}

	dropStaging(_stagingBuffer);

	// notify waiters that the transfer was aborted
	if (_callback) {
		_callback(false);
		_callback = nullptr;
	}

	_memType = nullptr;
	_alloc = nullptr;
}
177 :
178 0 : bool TransferResource::init(const Rc<Allocator> &alloc, const Rc<core::Resource> &res, Function<void(bool)> &&cb) {
179 0 : _alloc = alloc;
180 0 : _resource = res;
181 0 : if (cb) {
182 0 : _callback = move(cb);
183 : }
184 0 : return true;
185 : }
186 :
187 63 : bool TransferResource::init(const Rc<Allocator> &alloc, Rc<core::Resource> &&res, Function<void(bool)> &&cb) {
188 63 : _alloc = alloc;
189 63 : _resource = move(res);
190 63 : if (cb) {
191 21 : _callback = move(cb);
192 : }
193 63 : return true;
194 : }
195 :
// Creates Vulkan buffer/image objects for every entry of the resource,
// computes a packed layout for all non-dedicated objects inside a single
// memory block, then allocates and uploads. Idempotent: returns early if
// already initialized. On any failure the resource is cleared and all
// partially-created objects are destroyed via the cleanup lambda.
bool TransferResource::initialize(AllocationUsage usage) {
	if (_initialized) {
		return true;
	}

	auto dev = _alloc->getDevice();
	auto table = _alloc->getDevice()->getTable();

	// common failure path: drop resource data, destroy created objects, log
	auto cleanup = [&, this] (StringView reason) {
		_resource->clear();
		invalidate(*_alloc->getDevice());
		log::error("DeviceResourceTransfer", "Fail to init transfer for ", _resource->getName(), ": ", reason);
		return false;
	};

	_targetUsage = usage;
	_buffers.reserve(_resource->getBuffers().size());
	_images.reserve(_resource->getImages().size());

	for (auto &it : _resource->getBuffers()) {
		_buffers.emplace_back(it);
	}

	for (auto &it : _resource->getImages()) {
		_images.emplace_back(it);
	}

	// pre-create objects
	// `mask` accumulates memory types acceptable for ALL non-dedicated objects
	auto mask = _alloc->getInitialTypeMask();
	for (auto &it : _buffers) {
		if (table->vkCreateBuffer(dev->getDevice(), &it.info, nullptr, &it.buffer) != VK_SUCCESS) {
			return cleanup("Fail to create buffer");
		}

		it.req = _alloc->getBufferMemoryRequirements(it.buffer);
		if (!it.req.prefersDedicated && !it.req.requiresDedicated) {
			mask &= it.req.requirements.memoryTypeBits;
		}
		if (mask == 0) {
			return cleanup("No memory type available");
		}
	}

	for (auto &it : _images) {
		if (table->vkCreateImage(dev->getDevice(), &it.info, nullptr, &it.image) != VK_SUCCESS) {
			return cleanup("Fail to create image");
		}

		it.req = _alloc->getImageMemoryRequirements(it.image);
		if (!it.req.prefersDedicated && !it.req.requiresDedicated) {
			mask &= it.req.requirements.memoryTypeBits;
		}
		if (mask == 0) {
			return cleanup("No memory type available");
		}
	}

	if (mask == 0) {
		return cleanup("No common memory type for resource found");
	}

	auto allocMemType = _alloc->findMemoryType(mask, _targetUsage);

	if (!allocMemType) {
		log::error("Vk-Error", "Fail to find memory type for static resource: ", _resource->getName());
		return cleanup("Memory type not found");
	}

	// host-visible non-coherent memory requires atom-size-aligned flush ranges
	if (allocMemType->isHostVisible()) {
		if (!allocMemType->isHostCoherent()) {
			_nonCoherentAtomSize = _alloc->getNonCoherentAtomSize();
		}
	}

	// layout pass 1: optimal-tiling images first
	for (auto &it : _images) {
		if (!it.req.requiresDedicated && !it.req.prefersDedicated) {
			if (it.info.tiling == VK_IMAGE_TILING_OPTIMAL) {
				_requiredMemory = math::align<VkDeviceSize>(_requiredMemory,
						std::max(it.req.requirements.alignment, _nonCoherentAtomSize));
				it.offset = _requiredMemory;
				_requiredMemory += it.req.requirements.size;
			}
		}
	}

	// separate optimal (opaque) from linear resources per bufferImageGranularity
	_requiredMemory = math::align<VkDeviceSize>(_requiredMemory, _alloc->getBufferImageGranularity());

	// layout pass 2: linear-tiling images
	for (auto &it : _images) {
		if (!it.req.requiresDedicated && !it.req.prefersDedicated) {
			if (it.info.tiling != VK_IMAGE_TILING_OPTIMAL) {
				_requiredMemory = math::align<VkDeviceSize>(_requiredMemory,
						std::max(it.req.requirements.alignment, _nonCoherentAtomSize));
				it.offset = _requiredMemory;
				_requiredMemory += it.req.requirements.size;
			}
		}
	}

	// layout pass 3: buffers
	for (auto &it : _buffers) {
		if (!it.req.requiresDedicated && !it.req.prefersDedicated) {
			_requiredMemory = math::align<VkDeviceSize>(_requiredMemory,
					std::max(it.req.requirements.alignment, _nonCoherentAtomSize));
			it.offset = _requiredMemory;
			_requiredMemory += it.req.requirements.size;
		}
	}

	_memType = allocMemType;

	_initialized = allocate() && upload();
	return _initialized;
}
308 :
// Allocates the shared memory block sized by initialize() and binds every
// object: dedicated-preferring objects get their own allocation, everything
// else is bound at its precomputed offset. Binding order mirrors the layout
// order (optimal images, then linear images, then buffers).
// Requires initialize() to have selected _memType; fails otherwise.
bool TransferResource::allocate() {
	if (!_memType) {
		return false;
	}

	auto dev = _alloc->getDevice();
	auto table = _alloc->getDevice()->getTable();

	// common failure path: destroy everything created so far and log
	auto cleanup = [&, this] (StringView reason) {
		invalidate(*_alloc->getDevice());
		log::error("DeviceResourceTransfer", "Fail to allocate memory for ", _resource->getName(), ": ", reason);
		return false;
	};

	if (_requiredMemory > 0) {
		VkMemoryAllocateInfo allocInfo{};
		allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
		allocInfo.allocationSize = _requiredMemory;
		allocInfo.memoryTypeIndex = _memType->idx;

		if (table->vkAllocateMemory(dev->getDevice(), &allocInfo, nullptr, &_memory) != VK_SUCCESS) {
			log::error("Vk-Error", "Fail to allocate memory for static resource: ", _resource->getName());
			return cleanup("Fail to allocate memory");
		}
	}

	// bind memory
	for (auto &it : _images) {
		if (it.req.requiresDedicated || it.req.prefersDedicated) {
			if (!allocateDedicated(_alloc, it)) {
				return cleanup("Fail to allocate memory");
			}
		} else {
			if (it.info.tiling == VK_IMAGE_TILING_OPTIMAL) {
				table->vkBindImageMemory(dev->getDevice(), it.image, _memory, it.offset);
			}
		}
	}

	// linear-tiling images were laid out after the granularity boundary
	for (auto &it : _images) {
		if (!it.req.requiresDedicated && !it.req.prefersDedicated) {
			if (it.info.tiling != VK_IMAGE_TILING_OPTIMAL) {
				table->vkBindImageMemory(dev->getDevice(), it.image, _memory, it.offset);
			}
		}
	}

	for (auto &it : _buffers) {
		if (it.req.requiresDedicated || it.req.prefersDedicated) {
			if (!allocateDedicated(_alloc, it)) {
				return cleanup("Fail to allocate memory");
			}
		} else {
			table->vkBindBufferMemory(dev->getDevice(), it.buffer, _memory, it.offset);
		}
	}

	return true;
}
368 :
369 63 : bool TransferResource::upload() {
370 63 : size_t stagingSize = preTransferData();
371 63 : if (stagingSize == 0) {
372 0 : return true;
373 : }
374 :
375 63 : if (stagingSize == maxOf<size_t>()) {
376 0 : invalidate(*_alloc->getDevice());
377 0 : return false; // failed with error
378 : }
379 :
380 63 : if (createStagingBuffer(_stagingBuffer, stagingSize)) {
381 63 : if (writeStaging(_stagingBuffer)) {
382 63 : return true;
383 : }
384 : }
385 :
386 0 : dropStaging(_stagingBuffer);
387 0 : invalidate(*_alloc->getDevice());
388 0 : return false;
389 : }
390 :
// Converts the raw Vulkan handles into engine objects (Image/Buffer wrapping
// DeviceMemory) and hands ownership to the resource data. After this call
// the TransferResource no longer owns any Vulkan handle (all fields are
// nulled), so invalidate()/the destructor become no-ops for them.
// Pending queue-ownership-transfer barriers recorded by prepareCommands()
// are attached to the created objects.
bool TransferResource::compile() {
	Rc<DeviceMemory> mem;
	if (_memory) {
		// wrap the shared memory block; ownership moves to the engine objects
		mem = Rc<DeviceMemory>::create(_alloc, DeviceMemoryInfo{
			_requiredMemory, 1, _memType->idx, false
		}, _memory, _targetUsage);
	}

	for (auto &it : _images) {
		Rc<Image> img;
		if (it.dedicated) {
			// image owns its dedicated allocation exclusively
			auto dedicated = Rc<DeviceMemory>::create(_alloc, DeviceMemoryInfo{
				it.req.requirements.size, it.req.requirements.alignment, it.dedicatedMemType, true
			}, it.dedicated, _targetUsage);
			img = Rc<Image>::create(*_alloc->getDevice(), it.image, *it.data, move(dedicated), Rc<core::DataAtlas>(it.data->atlas));
			it.dedicated = VK_NULL_HANDLE;
		} else {
			img = Rc<Image>::create(*_alloc->getDevice(), it.image, *it.data, Rc<DeviceMemory>(mem), Rc<core::DataAtlas>(it.data->atlas));
		}
		if (it.barrier) {
			// barrier for queue-family acquire on first use
			img->setPendingBarrier(it.barrier.value());
		}
		it.data->image.set(img);
		it.image = VK_NULL_HANDLE;
	}

	for (auto &it : _buffers) {
		Rc<Buffer> buf;
		if (it.dedicated) {
			// buffer owns its dedicated allocation exclusively
			auto dedicated = Rc<DeviceMemory>::create(_alloc, DeviceMemoryInfo{
				it.req.requirements.size, it.req.requirements.alignment, it.dedicatedMemType, true
			}, it.dedicated, _targetUsage);
			buf = Rc<Buffer>::create(*_alloc->getDevice(), it.buffer, *it.data, move(dedicated), 0);
			it.dedicated = VK_NULL_HANDLE;
		} else {
			buf = Rc<Buffer>::create(*_alloc->getDevice(), it.buffer, *it.data, Rc<DeviceMemory>(mem), it.offset);
		}
		if (it.barrier) {
			// barrier for queue-family acquire on first use
			buf->setPendingBarrier(it.barrier.value());
		}
		it.data->buffer.set(buf);
		it.buffer = VK_NULL_HANDLE;
	}

	// shared block is now owned by `mem` wrappers
	_memory = VK_NULL_HANDLE;

	_resource->setCompiled(true);

	// report success exactly once
	if (_callback) {
		_callback(true);
		_callback = nullptr;
	}

	return true;
}
446 :
447 189 : static VkImageAspectFlagBits getFormatAspectFlags(VkFormat fmt, bool separateDepthStencil) {
448 189 : switch (fmt) {
449 0 : case VK_FORMAT_D16_UNORM:
450 : case VK_FORMAT_X8_D24_UNORM_PACK32:
451 : case VK_FORMAT_D32_SFLOAT:
452 0 : if (separateDepthStencil) {
453 0 : return VK_IMAGE_ASPECT_DEPTH_BIT;
454 : } else {
455 0 : return VkImageAspectFlagBits(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
456 : }
457 : break;
458 0 : case VK_FORMAT_D16_UNORM_S8_UINT:
459 : case VK_FORMAT_D24_UNORM_S8_UINT:
460 : case VK_FORMAT_D32_SFLOAT_S8_UINT:
461 0 : return VkImageAspectFlagBits(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
462 : break;
463 0 : case VK_FORMAT_S8_UINT:
464 0 : if (separateDepthStencil) {
465 0 : return VK_IMAGE_ASPECT_STENCIL_BIT;
466 : } else {
467 0 : return VkImageAspectFlagBits(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
468 : }
469 : break;
470 189 : default:
471 189 : return VK_IMAGE_ASPECT_COLOR_BIT;
472 : break;
473 : }
474 : }
475 :
// Records staging copies into `buf` for the queue family `idx`:
//  1. transitions every target image to TRANSFER_DST_OPTIMAL,
//  2. copies staging regions into buffers/images,
//  3. emits post-transfer barriers into the output vectors (caller submits
//     them) — these also perform queue-family release when the object's
//     target queue differs from `idx`; such barriers are stored on the
//     AllocInfo so compile() can attach the matching acquire barrier.
bool TransferResource::prepareCommands(uint32_t idx, VkCommandBuffer buf,
		Vector<VkImageMemoryBarrier> &outputImageBarriers, Vector<VkBufferMemoryBarrier> &outputBufferBarriers) {
	auto dev = _alloc->getDevice();
	auto table = _alloc->getDevice()->getTable();

	// step 1: host-write -> transfer-write layout transitions for images
	Vector<VkImageMemoryBarrier> inputImageBarriers;
	for (auto &it : _stagingBuffer.copyData) {
		if (it.targetImage) {
			inputImageBarriers.emplace_back(VkImageMemoryBarrier({
				VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, nullptr,
				VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
				VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
				it.targetImage->image, VkImageSubresourceRange({
					VkImageAspectFlags(getFormatAspectFlags(it.targetImage->info.format, false)),
					0, it.targetImage->data->mipLevels.get(), 0, it.targetImage->data->arrayLayers.get()
				})
			}));
		}
	}

	table->vkCmdPipelineBarrier(buf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
			0, nullptr,
			0, nullptr, // bufferBarriers.size(), bufferBarriers.data(),
			inputImageBarriers.size(), inputImageBarriers.data());

	// step 2: copy each staging region into its target buffer or image
	for (auto &it : _stagingBuffer.copyData) {
		if (it.targetBuffer) {
			VkBufferCopy copyRegion{};
			copyRegion.srcOffset = it.sourceOffet;
			copyRegion.dstOffset = 0;
			copyRegion.size = it.sourceSize;
			table->vkCmdCopyBuffer(buf, _stagingBuffer.buffer.buffer, it.targetBuffer->buffer, 1, &copyRegion);
		} else if (it.targetImage) {
			VkBufferImageCopy copyRegion{};
			copyRegion.bufferOffset = it.sourceOffet;
			copyRegion.bufferRowLength = 0; // If either of these values is zero, that aspect of the buffer memory
			copyRegion.bufferImageHeight = 0; // is considered to be tightly packed according to the imageExtent
			copyRegion.imageSubresource = VkImageSubresourceLayers({
				VkImageAspectFlags(getFormatAspectFlags(it.targetImage->info.format, false)), 0, 0, it.targetImage->data->arrayLayers.get()
			});
			copyRegion.imageOffset = VkOffset3D({0, 0, 0});
			copyRegion.imageExtent = it.targetImage->info.extent;

			table->vkCmdCopyBufferToImage(buf, _stagingBuffer.buffer.buffer, it.targetImage->image,
					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copyRegion);
		}
	}

	// step 3: post-transfer barriers to target access/layout (+ QFOT release)
	for (auto &it : _stagingBuffer.copyData) {
		if (it.targetImage) {
			if (auto q = dev->getQueueFamily(getQueueOperations(it.targetImage->data->type))) {
				uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
				uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;

				// target queue family differs: perform ownership transfer
				if (q->index != idx) {
					srcQueueFamilyIndex = idx;
					dstQueueFamilyIndex = q->index;
				}

				auto &ref = outputImageBarriers.emplace_back(VkImageMemoryBarrier({
					VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, nullptr,
					VK_ACCESS_TRANSFER_WRITE_BIT, VkAccessFlags(it.targetImage->data->targetAccess),
					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VkImageLayout(it.targetImage->data->targetLayout),
					srcQueueFamilyIndex, dstQueueFamilyIndex,
					it.targetImage->image, VkImageSubresourceRange({
						VkImageAspectFlags(getFormatAspectFlags(it.targetImage->info.format, false)),
						0, it.targetImage->data->mipLevels.get(), 0, it.targetImage->data->arrayLayers.get()
					})
				}));

				// remember the barrier so the acquire side can be replayed later
				if (q->index != idx) {
					it.targetImage->barrier.emplace(ref);
				}
			}
		} else if (it.targetBuffer) {
			if (auto q = dev->getQueueFamily(getQueueOperations(it.targetBuffer->data->type))) {
				uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
				uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;

				// target queue family differs: perform ownership transfer
				if (q->index != idx) {
					srcQueueFamilyIndex = idx;
					dstQueueFamilyIndex = q->index;
				}

				auto &ref = outputBufferBarriers.emplace_back(VkBufferMemoryBarrier({
					VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, nullptr,
					VK_ACCESS_TRANSFER_WRITE_BIT, VkAccessFlags(it.targetBuffer->data->targetAccess),
					srcQueueFamilyIndex, dstQueueFamilyIndex,
					it.targetBuffer->buffer, 0, VK_WHOLE_SIZE
				}));

				// remember the barrier so the acquire side can be replayed later
				if (q->index != idx) {
					it.targetBuffer->barrier.emplace(ref);
				}
			}
		}
	}

	return true;
}
577 :
// Records a one-shot transfer command buffer on `pool` and submits it on
// `queue`; completion is signaled through `fence`. Returns false if
// recording or submission fails.
bool TransferResource::transfer(const Rc<DeviceQueue> &queue, const Rc<CommandPool> &pool, const Rc<Fence> &fence) {
	auto dev = _alloc->getDevice();
	auto table = _alloc->getDevice()->getTable();
	auto buf = pool->recordBuffer(*dev, [&, this] (CommandBuffer &buf) {
		Vector<VkImageMemoryBarrier> outputImageBarriers;
		Vector<VkBufferMemoryBarrier> outputBufferBarriers;

		// record staging copies; collects post-transfer barriers
		if (!prepareCommands(queue->getIndex(), buf.getBuffer(), outputImageBarriers, outputBufferBarriers)) {
			return false;
		}

		// make transferred data visible to subsequent shader stages
		table->vkCmdPipelineBarrier(buf.getBuffer(), VK_PIPELINE_STAGE_TRANSFER_BIT,
				VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0,
				0, nullptr,
				outputBufferBarriers.size(), outputBufferBarriers.data(),
				outputImageBarriers.size(), outputImageBarriers.data());
		return true;
	});

	if (buf) {
		return queue->submit(*fence, buf);
	}
	return false;
}
602 :
603 63 : void TransferResource::dropStaging(StagingBuffer &buffer) const {
604 63 : auto dev = _alloc->getDevice();
605 63 : auto table = _alloc->getDevice()->getTable();
606 :
607 63 : if (buffer.buffer.buffer != VK_NULL_HANDLE) {
608 63 : table->vkDestroyBuffer(dev->getDevice(), buffer.buffer.buffer, nullptr);
609 63 : buffer.buffer.buffer = VK_NULL_HANDLE;
610 : }
611 63 : if (buffer.buffer.dedicated != VK_NULL_HANDLE) {
612 63 : table->vkFreeMemory(dev->getDevice(), buffer.buffer.dedicated, nullptr);
613 63 : buffer.buffer.dedicated = VK_NULL_HANDLE;
614 : }
615 63 : }
616 :
617 0 : bool TransferResource::allocateDedicated(const Rc<Allocator> &alloc, BufferAllocInfo &it) {
618 0 : auto dev = alloc->getDevice();
619 0 : auto table = alloc->getDevice()->getTable();
620 0 : auto type = alloc->findMemoryType(it.req.requirements.memoryTypeBits, AllocationUsage::DeviceLocal);
621 0 : if (!type) {
622 0 : return false;
623 : }
624 :
625 : VkMemoryDedicatedAllocateInfo dedicatedInfo;
626 0 : dedicatedInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
627 0 : dedicatedInfo.pNext = nullptr;
628 0 : dedicatedInfo.image = VK_NULL_HANDLE;
629 0 : dedicatedInfo.buffer = it.buffer;
630 :
631 0 : VkMemoryAllocateInfo allocInfo{};
632 0 : allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
633 0 : allocInfo.pNext = &dedicatedInfo;
634 0 : allocInfo.allocationSize = it.req.requirements.size;
635 0 : allocInfo.memoryTypeIndex = type->idx;
636 :
637 0 : if (table->vkAllocateMemory(dev->getDevice(), &allocInfo, nullptr, &it.dedicated) != VK_SUCCESS) {
638 0 : log::error("Vk-Error", "Fail to allocate memory for static resource: ", _resource->getName());
639 0 : return false;
640 : }
641 :
642 0 : table->vkBindBufferMemory(dev->getDevice(), it.buffer, it.dedicated, 0);
643 0 : it.dedicatedMemType = type->idx;
644 0 : return true;
645 : }
646 :
647 0 : bool TransferResource::allocateDedicated(const Rc<Allocator> &alloc, ImageAllocInfo &it) {
648 0 : auto dev = alloc->getDevice();
649 0 : auto table = alloc->getDevice()->getTable();
650 0 : auto type = alloc->findMemoryType(it.req.requirements.memoryTypeBits, AllocationUsage::DeviceLocal);
651 0 : if (!type) {
652 0 : return false;
653 : }
654 :
655 : VkMemoryDedicatedAllocateInfo dedicatedInfo;
656 0 : dedicatedInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
657 0 : dedicatedInfo.pNext = nullptr;
658 0 : dedicatedInfo.image = it.image;
659 0 : dedicatedInfo.buffer = VK_NULL_HANDLE;
660 :
661 0 : VkMemoryAllocateInfo allocInfo{};
662 0 : allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
663 0 : allocInfo.pNext = &dedicatedInfo;
664 0 : allocInfo.allocationSize = it.req.requirements.size;
665 0 : allocInfo.memoryTypeIndex = type->idx;
666 :
667 0 : if (table->vkAllocateMemory(dev->getDevice(), &allocInfo, nullptr, &it.dedicated) != VK_SUCCESS) {
668 0 : log::error("Vk-Error", "Fail to allocate memory for static resource: ", _resource->getName());
669 0 : return false;
670 : }
671 :
672 0 : table->vkBindImageMemory(dev->getDevice(), it.image, it.dedicated, 0);
673 0 : it.dedicatedMemType = type->idx;
674 0 : return true;
675 : }
676 :
677 399 : size_t TransferResource::writeData(uint8_t *mem, BufferAllocInfo &info) {
678 399 : return info.data->writeData(mem, info.data->size);
679 : }
680 :
681 63 : size_t TransferResource::writeData(uint8_t *mem, ImageAllocInfo &info) {
682 63 : uint64_t expectedSize = getFormatBlockSize(info.data->format) * info.data->extent.width * info.data->extent.height * info.data->extent.depth;
683 63 : return info.data->writeData(mem, expectedSize);
684 : }
685 :
// Writes directly into host-visible memory where possible (mapping the
// shared block and/or host-visible dedicated allocations, flushing
// non-coherent ranges), and computes the staging layout for everything else
// (marks useStaging, assigns stagingOffset). Returns the total staging size
// needed — 0 when nothing needs staging, maxOf<size_t>() on a mapping
// failure.
size_t TransferResource::preTransferData() {
	auto dev = _alloc->getDevice();
	auto table = _alloc->getDevice()->getTable();

	// map the shared block once if its memory type is host-visible
	uint8_t *generalMem = nullptr;
	if (_memType->isHostVisible()) {
		void *targetMem = nullptr;
		if (table->vkMapMemory(dev->getDevice(), _memory, 0, VK_WHOLE_SIZE, 0, &targetMem) != VK_SUCCESS) {
			log::error("Vk-Error", "Fail to map internal memory: ", _resource->getName());
			return maxOf<size_t>();
		}
		generalMem = (uint8_t *)targetMem;
	}

	// staging regions are aligned to at least 16 bytes (or the atom size)
	size_t alignment = std::max(VkDeviceSize(0x10), _alloc->getNonCoherentAtomSize());
	size_t stagingSize = 0;

	for (auto &it : _images) {
		if (it.dedicated && _alloc->getType(it.dedicatedMemType)->isHostVisible() && it.info.tiling != VK_IMAGE_TILING_OPTIMAL) {
			// linear host-visible dedicated image: write in place
			void *targetMem = nullptr;
			if (table->vkMapMemory(dev->getDevice(), it.dedicated, 0, VK_WHOLE_SIZE, 0, &targetMem) != VK_SUCCESS) {
				log::error("Vk-Error", "Fail to map dedicated memory: ", _resource->getName());
				return maxOf<size_t>();
			}
			writeData((uint8_t *)targetMem, it);
			table->vkUnmapMemory(dev->getDevice(), it.dedicated);
			if (!_alloc->getType(it.dedicatedMemType)->isHostCoherent()) {
				// non-coherent memory: flush explicitly
				VkMappedMemoryRange range;
				range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
				range.pNext = nullptr;
				range.memory = it.dedicated;
				range.offset = 0;
				range.size = VK_WHOLE_SIZE;
				table->vkFlushMappedMemoryRanges(dev->getDevice(), 1, &range);
			}
		} else if (it.info.tiling == VK_IMAGE_TILING_OPTIMAL || it.dedicated || generalMem == nullptr) {
			// host cannot write directly: reserve a staging region
			it.useStaging = true;
			stagingSize = math::align<VkDeviceSize>(stagingSize, alignment);
			it.stagingOffset = stagingSize;
			stagingSize += getFormatBlockSize(it.info.format) * it.info.extent.width * it.info.extent.height * it.info.extent.depth;
		} else {
			// linear image in the mapped shared block: write in place
			writeData(generalMem + it.offset, it);
		}
	}

	for (auto &it : _buffers) {
		if (it.dedicated && _alloc->getType(it.dedicatedMemType)->isHostVisible()) {
			// host-visible dedicated buffer: write in place
			void *targetMem = nullptr;
			if (table->vkMapMemory(dev->getDevice(), it.dedicated, 0, VK_WHOLE_SIZE, 0, &targetMem) != VK_SUCCESS) {
				log::error("Vk-Error", "Fail to map dedicated memory: ", _resource->getName());
				return maxOf<size_t>();
			}
			writeData((uint8_t *)targetMem, it);
			table->vkUnmapMemory(dev->getDevice(), it.dedicated);
			if (!_alloc->getType(it.dedicatedMemType)->isHostCoherent()) {
				// non-coherent memory: flush explicitly
				VkMappedMemoryRange range;
				range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
				range.pNext = nullptr;
				range.memory = it.dedicated;
				range.offset = 0;
				range.size = VK_WHOLE_SIZE;
				table->vkFlushMappedMemoryRanges(dev->getDevice(), 1, &range);
			}
		} else if (generalMem == nullptr || it.dedicated) {
			// host cannot write directly: reserve a staging region
			it.useStaging = true;
			stagingSize = math::align<VkDeviceSize>(stagingSize, alignment);
			it.stagingOffset = stagingSize;
			stagingSize += it.data->size;
		} else {
			// buffer in the mapped shared block: write in place
			writeData(generalMem + it.offset, it);
		}
	}

	// unmap (and flush, if non-coherent) the shared block
	if (generalMem) {
		table->vkUnmapMemory(dev->getDevice(), _memory);
		if (!_memType->isHostCoherent()) {
			VkMappedMemoryRange range;
			range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
			range.pNext = nullptr;
			range.memory = _memory;
			range.offset = 0;
			range.size = VK_WHOLE_SIZE;
			table->vkFlushMappedMemoryRanges(dev->getDevice(), 1, &range);
		}
		generalMem = nullptr;
	}

	return stagingSize;
}
775 :
776 63 : bool TransferResource::createStagingBuffer(StagingBuffer &buffer, size_t stagingSize) const {
777 63 : auto dev = _alloc->getDevice();
778 63 : auto table = _alloc->getDevice()->getTable();
779 :
780 63 : buffer.buffer.info.flags = 0;
781 63 : buffer.buffer.info.size = stagingSize;
782 63 : buffer.buffer.info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
783 63 : buffer.buffer.info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
784 :
785 63 : if (table->vkCreateBuffer(dev->getDevice(), &buffer.buffer.info, nullptr, &buffer.buffer.buffer) != VK_SUCCESS) {
786 0 : log::error("Vk-Error", "Fail to create staging buffer for static resource: ", _resource->getName());
787 0 : return false;
788 : }
789 :
790 63 : auto mask = _alloc->getInitialTypeMask();
791 63 : buffer.buffer.req = _alloc->getBufferMemoryRequirements(buffer.buffer.buffer);
792 :
793 63 : mask &= buffer.buffer.req.requirements.memoryTypeBits;
794 :
795 63 : if (mask == 0) {
796 0 : log::error("Vk-Error", "Fail to find staging memory mask for static resource: ", _resource->getName());
797 0 : return false;
798 : }
799 :
800 63 : auto type = _alloc->findMemoryType(mask, AllocationUsage::HostTransitionSource);
801 :
802 63 : if (!type) {
803 0 : log::error("Vk-Error", "Fail to find staging memory type for static resource: ", _resource->getName());
804 0 : return false;
805 : }
806 :
807 63 : buffer.memoryTypeIndex = type->idx;
808 :
809 63 : if (_alloc->hasDedicatedFeature()) {
810 : VkMemoryDedicatedAllocateInfo dedicatedInfo;
811 63 : dedicatedInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
812 63 : dedicatedInfo.pNext = nullptr;
813 63 : dedicatedInfo.image = VK_NULL_HANDLE;
814 63 : dedicatedInfo.buffer = buffer.buffer.buffer;
815 :
816 63 : VkMemoryAllocateInfo allocInfo{};
817 63 : allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
818 63 : allocInfo.pNext = &dedicatedInfo;
819 63 : allocInfo.allocationSize = buffer.buffer.req.requirements.size;
820 63 : allocInfo.memoryTypeIndex = buffer.memoryTypeIndex;
821 :
822 63 : if (table->vkAllocateMemory(dev->getDevice(), &allocInfo, nullptr, &buffer.buffer.dedicated) != VK_SUCCESS) {
823 0 : log::error("Vk-Error", "Fail to allocate staging memory for static resource: ", _resource->getName());
824 0 : return false;
825 : }
826 :
827 63 : table->vkBindBufferMemory(dev->getDevice(), buffer.buffer.buffer, buffer.buffer.dedicated, 0);
828 : } else {
829 0 : VkMemoryAllocateInfo allocInfo{};
830 0 : allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
831 0 : allocInfo.pNext = nullptr;
832 0 : allocInfo.allocationSize = buffer.buffer.req.requirements.size;
833 0 : allocInfo.memoryTypeIndex = buffer.memoryTypeIndex;
834 :
835 0 : if (table->vkAllocateMemory(dev->getDevice(), &allocInfo, nullptr, &buffer.buffer.dedicated) != VK_SUCCESS) {
836 0 : log::error("Vk-Error", "Fail to allocate staging memory for static resource: ", _resource->getName());
837 0 : return false;
838 : }
839 :
840 0 : table->vkBindBufferMemory(dev->getDevice(), buffer.buffer.buffer, buffer.buffer.dedicated, 0);
841 : }
842 :
843 63 : return true;
844 : }
845 :
846 63 : bool TransferResource::writeStaging(StagingBuffer &buffer) {
847 63 : auto dev = _alloc->getDevice();
848 63 : auto table = _alloc->getDevice()->getTable();
849 :
850 63 : uint8_t *stagingMem = nullptr;
851 : do {
852 63 : void *targetMem = nullptr;
853 63 : if (table->vkMapMemory(dev->getDevice(), buffer.buffer.dedicated, 0, VK_WHOLE_SIZE, 0, &targetMem) != VK_SUCCESS) {
854 0 : return false;
855 : }
856 63 : stagingMem = (uint8_t *)targetMem;
857 : } while (0);
858 :
859 63 : if (!stagingMem) {
860 0 : log::error("Vk-Error", "Fail to map staging memory for static resource: ", _resource->getName());
861 0 : return false;
862 : }
863 :
864 126 : for (auto &it : _images) {
865 63 : if (it.useStaging) {
866 63 : auto size = writeData(stagingMem + it.stagingOffset, it);
867 63 : buffer.copyData.emplace_back(StagingCopy({it.stagingOffset, size, &it, nullptr}));
868 : }
869 : }
870 :
871 462 : for (auto &it : _buffers) {
872 399 : if (it.useStaging) {
873 399 : auto size = writeData(stagingMem + it.stagingOffset, it);
874 399 : buffer.copyData.emplace_back(StagingCopy({it.stagingOffset, size, nullptr, &it}));
875 : }
876 : }
877 :
878 63 : table->vkUnmapMemory(dev->getDevice(), buffer.buffer.dedicated);
879 63 : if (!_alloc->getType(buffer.memoryTypeIndex)->isHostCoherent()) {
880 : VkMappedMemoryRange range;
881 0 : range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
882 0 : range.pNext = nullptr;
883 0 : range.memory = _memory;
884 0 : range.offset = 0;
885 0 : range.size = VK_WHOLE_SIZE;
886 0 : table->vkFlushMappedMemoryRanges(dev->getDevice(), 1, &range);
887 : }
888 :
889 63 : return true;
890 : }
891 :
892 :
893 84 : TransferAttachment::~TransferAttachment() { }
894 :
895 21 : auto TransferAttachment::makeFrameHandle(const FrameQueue &handle) -> Rc<AttachmentHandle> {
896 42 : return Rc<TransferAttachmentHandle>::create(this, handle);
897 : }
898 :
899 42 : TransferAttachmentHandle::~TransferAttachmentHandle() { }
900 :
// No asynchronous setup is required for the transfer attachment; the
// completion callback is intentionally unused and setup always succeeds.
bool TransferAttachmentHandle::setup(FrameQueue &handle, Function<void(bool)> &&) {
	return true;
}
904 :
// Accepts the frame's input data (expected to be a TransferResource), waits
// for its declared dependencies, then initializes the resource on a queue
// thread. `cb` is invoked exactly once with the overall success flag.
void TransferAttachmentHandle::submitInput(FrameQueue &q, Rc<core::AttachmentInputData> &&data, Function<void(bool)> &&cb) {
	// Downcast the generic input data to the transfer resource this handle owns
	_resource = data.cast<TransferResource>();
	if (!_resource || q.isFinalized()) {
		// Wrong input type or the queue is already shut down — report failure
		cb(false);
		return;
	}

	// Defer the actual work until all frame dependencies of the input are satisfied;
	// `cb` is moved into the outer continuation
	q.getFrame()->waitForDependencies(data->waitDependencies, [this, cb = move(cb)] (FrameHandle &handle, bool success) {
		if (!success || !handle.isValidFlag()) {
			// Dependency wait failed or the frame was invalidated meanwhile
			cb(false);
			return;
		}

		// Initialize the resource off the main thread; the second lambda runs
		// afterwards and forwards the result to the caller's callback
		handle.performInQueue([this] (FrameHandle &frame) -> bool {
			if (_resource->initialize()) {
				return true;
			}
			return false;
		}, [cb = move(cb)] (FrameHandle &frame, bool success) {
			cb(success);
		}, nullptr, "TransferAttachmentHandle::submitInput");
	});
}
928 :
929 :
930 84 : TransferPass::~TransferPass() { }
931 :
932 42 : bool TransferPass::init(QueuePassBuilder &passBuilder, const AttachmentData *attachment) {
933 42 : passBuilder.addAttachment(attachment);
934 :
935 42 : _attachment = attachment;
936 :
937 42 : return QueuePass::init(passBuilder);
938 : }
939 :
940 21 : auto TransferPass::makeFrameHandle(const FrameQueue &handle) -> Rc<QueuePassHandle> {
941 42 : return Rc<TransferRenderPassHandle>::create(*this, handle);
942 : }
943 :
944 42 : TransferRenderPassHandle::~TransferRenderPassHandle() { }
945 :
946 21 : Vector<const CommandBuffer *> TransferRenderPassHandle::doPrepareCommands(FrameHandle &) {
947 21 : auto pass = static_cast<TransferPass *>(_queuePass.get());
948 21 : TransferAttachmentHandle *transfer = nullptr;
949 42 : for (auto &it : _queueData->attachments) {
950 21 : if (it.first->attachment == pass->getAttachment()) {
951 21 : transfer = static_cast<TransferAttachmentHandle *>(it.second->handle.get());
952 : }
953 : }
954 :
955 21 : if (!transfer) {
956 0 : return Vector<const CommandBuffer *>();
957 : }
958 :
959 21 : auto table = _device->getTable();
960 21 : auto buf = _pool->recordBuffer(*_device, [&, this] (CommandBuffer &buf) {
961 21 : Vector<VkImageMemoryBarrier> outputImageBarriers;
962 21 : Vector<VkBufferMemoryBarrier> outputBufferBarriers;
963 :
964 21 : if (!transfer->getResource()->prepareCommands(_pool->getFamilyIdx(), buf.getBuffer(), outputImageBarriers, outputBufferBarriers)) {
965 0 : return false;
966 : }
967 :
968 21 : VkPipelineStageFlags targetMask = 0;
969 21 : if ((_pool->getClass() & QueueOperations::Graphics) != QueueOperations::None) {
970 0 : targetMask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
971 : }
972 21 : if ((_pool->getClass() & QueueOperations::Compute) != QueueOperations::None) {
973 0 : targetMask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
974 : }
975 21 : if (targetMask == 0) {
976 21 : targetMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
977 : }
978 :
979 21 : table->vkCmdPipelineBarrier(buf.getBuffer(), VK_PIPELINE_STAGE_TRANSFER_BIT, targetMask, 0,
980 : 0, nullptr,
981 21 : outputBufferBarriers.size(), outputBufferBarriers.data(),
982 21 : outputImageBarriers.size(), outputImageBarriers.data());
983 21 : return true;
984 21 : });
985 :
986 21 : return Vector<const CommandBuffer *>{buf};
987 : }
988 :
989 21 : void TransferRenderPassHandle::doComplete(FrameQueue &queue, Function<void(bool)> &&func, bool success) {
990 21 : if (success) {
991 21 : auto pass = static_cast<TransferPass *>(_queuePass.get());
992 21 : TransferAttachmentHandle *transfer = nullptr;
993 42 : for (auto &it : _queueData->attachments) {
994 21 : if (it.first->attachment == pass->getAttachment()) {
995 21 : transfer = static_cast<TransferAttachmentHandle *>(it.second->handle.get());
996 : }
997 : }
998 21 : transfer->getResource()->compile();
999 : }
1000 :
1001 21 : QueuePassHandle::doComplete(queue, move(func), success);
1002 21 : }
1003 :
1004 : }
|