Renames: Vk_Resources -> Vk_World, glActive -> gl_active.

Added comments to clarify the purpose of the Vk_Instance and Vk_World structures.
Artem Kharytoniuk 2017-05-29 14:22:51 +03:00
parent bee5d0a5ea
commit 7b3917119f
10 changed files with 131 additions and 121 deletions

View File

@@ -651,7 +651,7 @@ static void GLW_InitExtensions( void )
 */
 void GLimp_EndFrame (void)
 {
-	if (!glActive)
+	if (!gl_active)
 		return;
 	//
@@ -771,7 +771,7 @@ void GLimp_Shutdown( void )
 	WG_RestoreGamma();
-	glActive = false;
+	gl_active = false;
 	memset(&glConfig, 0, sizeof(glConfig));
 	memset(&glState, 0, sizeof(glState));
@@ -792,7 +792,7 @@ void vk_imp_init() {
 	ri.Printf(PRINT_ALL, "Initializing Vulkan subsystem\n");
 	// This will set qgl pointers to no-op placeholders.
-	if (!glActive) {
+	if (!gl_active) {
 		QGL_Init(nullptr);
 		qglActiveTextureARB = [] (GLenum) {};
 		qglClientActiveTextureARB = [](GLenum) {};

View File

@@ -65,8 +65,8 @@ void GL_Bind( image_t *image ) {
 		// VULKAN
 		if (vk.active) {
-			VkDescriptorSet set = vk_resources.images[final_image->index].descriptor_set;
-			vk_resources.current_descriptor_sets[glState.currenttmu] = set;
+			VkDescriptorSet set = vk_world.images[final_image->index].descriptor_set;
+			vk_world.current_descriptor_sets[glState.currenttmu] = set;
 		}
 	}
 }
@@ -438,7 +438,7 @@ void RB_BeginDrawingView (void) {
 	qglClear( clearBits );
 	// VULKAN
-	vk_clear_attachments(vk_resources.dirty_depth_attachment, fast_sky, fast_sky_color);
+	vk_clear_attachments(vk_world.dirty_depth_attachment, fast_sky, fast_sky_color);
 	if ( ( backEnd.refdef.rdflags & RDF_HYPERSPACE ) )
 	{
@@ -571,7 +571,7 @@ void RB_RenderDrawSurfList( drawSurf_t *drawSurfs, int numDrawSurfs ) {
 			qglLoadMatrixf( backEnd.or.modelMatrix );
 			// VULKAN
-			Com_Memcpy(vk_resources.modelview_transform, backEnd.or.modelMatrix, 64);
+			Com_Memcpy(vk_world.modelview_transform, backEnd.or.modelMatrix, 64);
 			//
 			// change depthrange if needed
@@ -603,7 +603,7 @@ void RB_RenderDrawSurfList( drawSurf_t *drawSurfs, int numDrawSurfs ) {
 		qglLoadMatrixf( backEnd.viewParms.world.modelMatrix );
 		// VULKAN
-		Com_Memcpy(vk_resources.modelview_transform, backEnd.viewParms.world.modelMatrix, 64);
+		Com_Memcpy(vk_world.modelview_transform, backEnd.viewParms.world.modelMatrix, 64);
 		if ( depthRange ) {
 			qglDepthRange (0, 1);
@@ -712,7 +712,7 @@ void RE_UploadCinematic (int w, int h, int cols, int rows, const byte *data, int
 	// VULKAN
 	if (vk.active) {
-		Vk_Image& image = vk_resources.images[tr.scratchImage[client]->index];
+		Vk_Image& image = vk_world.images[tr.scratchImage[client]->index];
 		vkDestroyImage(vk.device, image.handle, nullptr);
 		vkDestroyImageView(vk.device, image.view, nullptr);
 		vkFreeDescriptorSets(vk.device, vk.descriptor_pool, 1, &image.descriptor_set);
@@ -727,7 +727,7 @@ void RE_UploadCinematic (int w, int h, int cols, int rows, const byte *data, int
 	// VULKAN
 	if (vk.active) {
-		const Vk_Image& image = vk_resources.images[tr.scratchImage[client]->index];
+		const Vk_Image& image = vk_world.images[tr.scratchImage[client]->index];
 		vk_upload_image_data(image.handle, cols, rows, false, data, 4);
 	}
 }
@@ -901,7 +901,7 @@ void RB_ShowImages( void ) {
 	float x, y, w, h;
 	int start, end;
-	if (!glActive)
+	if (!gl_active)
 		return;
 	if ( !backEnd.projection2D ) {
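
Across these hunks GL_Bind keeps its legacy interface while feeding the Vulkan path: each image_t's index selects an entry in vk_world.images, and binding just records that image's descriptor set for the active texture unit, which vk_shade_geometry later hands to vkCmdBindDescriptorSets (one set per TMU). A minimal sketch of the pattern, with simplified names that are not the engine's exact declarations:

// Sketch: GL-style texture binding routed to Vulkan descriptor sets (illustrative only).
constexpr int MAX_TMUS = 2;                        // Quake 3 uses two texture units
VkDescriptorSet current_descriptor_sets[MAX_TMUS];

void bind_image_for_tmu(const Vk_Image* images, int image_index, int tmu) {
	// Equivalent of GL_Bind's Vulkan branch: remember the set, bind it at draw time.
	current_descriptor_sets[tmu] = images[image_index].descriptor_set;
}
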

View File

@@ -150,7 +150,7 @@ void GL_TextureMode( const char *string ) {
 		for ( i = 0 ; i < tr.numImages ; i++ ) {
 			image_t* glt = tr.images[i];
 			if (glt->mipmap) {
-				Vk_Image& image = vk_resources.images[i];
+				Vk_Image& image = vk_world.images[i];
 				vk_update_descriptor_set(image.descriptor_set, image.view, true, glt->wrapClampMode == GL_REPEAT);
 			}
 		}
@@ -569,7 +569,7 @@ static Image_Upload_Data generate_image_upload_data(const byte* data, int width,
 	// scale both axis down equally so we don't have to
 	// deal with a half mip resampling
 	//
-	int max_texture_size = glActive ? glConfig.maxTextureSize : 2048;
+	int max_texture_size = gl_active ? glConfig.maxTextureSize : 2048;
 	while ( scaled_width > max_texture_size
 		|| scaled_height > max_texture_size ) {
 		scaled_width >>= 1;
@@ -781,12 +781,12 @@ image_t *R_CreateImage( const char *name, const byte *pic, int width, int height
 	Image_Upload_Data upload_data = generate_image_upload_data(pic, width, height, mipmap, allowPicmip);
-	if (glActive) {
+	if (gl_active) {
 		image->internalFormat = upload_gl_image(upload_data, glWrapClampMode);
 	}
 	// VULKAN
 	if (vk.active) {
-		vk_resources.images[image->index] = upload_vk_image(upload_data, glWrapClampMode == GL_REPEAT);
+		vk_world.images[image->index] = upload_vk_image(upload_data, glWrapClampMode == GL_REPEAT);
 	}
 	if (isLightmap) {

View File

@@ -23,13 +23,13 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 #include "tr_local.h"
-bool glActive;
+bool gl_active;
 glconfig_t glConfig;
 glstate_t glState;
 // VULKAN
 Vk_Instance vk;
-Vk_Resources vk_resources;
+Vk_World vk_world;
 static void GfxInfo_f( void );
@@ -194,7 +194,7 @@ static void InitRenderAPI( void )
 	qglGetIntegerv( GL_MAX_TEXTURE_SIZE, &temp );
 	glConfig.maxTextureSize = temp;
-	glActive = true;
+	gl_active = true;
 }
 // VULKAN
@@ -781,7 +781,7 @@ void GfxInfo_f( void )
 		"fullscreen"
 	};
-	if (glActive) {
+	if (gl_active) {
 		ri.Printf( PRINT_ALL, "\nGL_VENDOR: %s\n", glConfig.vendor_string );
 		ri.Printf( PRINT_ALL, "GL_RENDERER: %s\n", glConfig.renderer_string );
 		ri.Printf( PRINT_ALL, "GL_VERSION: %s\n", glConfig.version_string );
@@ -807,7 +807,7 @@ void GfxInfo_f( void )
 	ri.Printf( PRINT_ALL, "picmip: %d\n", r_picmip->integer );
 	ri.Printf( PRINT_ALL, "texture bits: %d\n", r_texturebits->integer );
-	if (glActive) {
+	if (gl_active) {
 		ri.Printf( PRINT_ALL, "compiled vertex arrays: %s\n", enablestrings[qglLockArraysEXT != 0 ] );
 		ri.Printf( PRINT_ALL, "texenv add: %s\n", enablestrings[glConfig.textureEnvAddAvailable != 0] );
 		ri.Printf( PRINT_ALL, "compressed textures: %s\n", enablestrings[glConfig.textureCompression!=TC_NONE] );
@@ -817,7 +817,7 @@ void GfxInfo_f( void )
 	{
 		ri.Printf( PRINT_ALL, "HACK: using vertex lightmap approximation\n" );
 	}
-	if ( glActive && glConfig.smpActive ) {
+	if ( gl_active && glConfig.smpActive ) {
 		ri.Printf( PRINT_ALL, "Using dual processor acceleration\n" );
 	}
@@ -1002,7 +1002,7 @@ void R_Init( void ) {
 	Com_Memset( &tr, 0, sizeof( tr ) );
 	Com_Memset( &backEnd, 0, sizeof( backEnd ) );
 	Com_Memset( &tess, 0, sizeof( tess ) );
-	Com_Memset( &vk_resources, 0, sizeof( vk_resources ) );
+	Com_Memset( &vk_world, 0, sizeof( vk_world ) );
 	if ( (intptr_t)tess.xyz & 15 ) {
 		Com_Printf( "WARNING: tess.xyz not 16 byte aligned\n" );
@@ -1111,7 +1111,7 @@ void RE_Shutdown( qboolean destroyWindow ) {
 	R_DoneFreeType();
 	// shut down platform specific OpenGL stuff
-	if ( glActive ) {
+	if ( gl_active ) {
 		if (destroyWindow)
 			GLimp_Shutdown();
 	}
@@ -1143,7 +1143,7 @@ void RE_EndRegistration( void ) {
 	}
 	// VULKAN
-	ri.Printf(PRINT_ALL, "Vulkan: pipelines create time %d msec\n", (int)(vk_resources.pipeline_create_time * 1000));
+	ri.Printf(PRINT_ALL, "Vulkan: pipelines create time %d msec\n", (int)(vk_world.pipeline_create_time * 1000));
 }

View File

@@ -924,13 +924,13 @@ typedef struct {
 extern backEndState_t backEnd;
 extern trGlobals_t tr;
-extern bool glActive;            // set to true if OpenGL is used for rendering
+extern bool gl_active;           // set to true if OpenGL is used for rendering
 extern glconfig_t glConfig;      // outside of TR since it shouldn't be cleared during ref re-init
 extern glstate_t glState;        // outside of TR since it shouldn't be cleared during ref re-init
 // VULKAN
 extern Vk_Instance vk;           // shouldn't be cleared during ref re-init
-extern Vk_Resources vk_resources;    // this data is cleared during ref re-init
+extern Vk_World vk_world;        // this data is cleared during ref re-init
 //
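
The two comments above capture the split this commit formalizes: vk (Vk_Instance) lives for the whole renderer session, while vk_world (Vk_World) holds per-world state and is wiped on ref re-init. A minimal sketch of that lifecycle, using hypothetical wrapper names around the real functions that appear elsewhere in this diff:

// Sketch only; the outer function names are illustrative, not the engine's exact flow.
void renderer_startup() {
	vk_initialize();            // fills Vk_Instance (vk); persists until shutdown
}

void map_change_or_ref_reinit() {
	vk_release_resources();     // destroys images/samplers/pipelines, then
	                            // Com_Memset(&vk_world, 0, sizeof(vk_world))
}

void renderer_shutdown() {
	vk_shutdown();              // releases Vk_Instance-level objects
}
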

View File

@@ -1416,7 +1416,7 @@ void R_DebugPolygon( int color, int numPoints, float *points ) {
 	// Backface culling.
 	auto transform_to_eye_space = [](vec3_t v, vec3_t v_eye) {
-		auto m = vk_resources.modelview_transform;
+		auto m = vk_world.modelview_transform;
 		v_eye[0] = m[0]*v[0] + m[4]*v[1] + m[8 ]*v[2] + m[12];
 		v_eye[1] = m[1]*v[0] + m[5]*v[1] + m[9 ]*v[2] + m[13];
 		v_eye[2] = m[2]*v[0] + m[6]*v[1] + m[10]*v[2] + m[14];
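
A note on the indexing above: modelview_transform is copied straight from backEnd.or.modelMatrix and is also fed to qglLoadMatrixf, so it follows OpenGL's column-major layout. The lambda therefore computes v_eye = M * v without transposing:

// Column-major layout assumed by modelview_transform (translation in the last column):
//
//	| m[0]  m[4]  m[8]   m[12] |
//	| m[1]  m[5]  m[9]   m[13] |
//	| m[2]  m[6]  m[10]  m[14] |
//	| m[3]  m[7]  m[11]  m[15] |
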

View File

@@ -331,17 +331,17 @@ void RB_ShadowFinish( void ) {
 	// set backEnd.or.modelMatrix to identity matrix
 	float tmp[16];
-	Com_Memcpy(tmp, vk_resources.modelview_transform, 64);
-	Com_Memset(vk_resources.modelview_transform, 0, 64);
-	vk_resources.modelview_transform[0] = 1.0f;
-	vk_resources.modelview_transform[5] = 1.0f;
-	vk_resources.modelview_transform[10] = 1.0f;
-	vk_resources.modelview_transform[15] = 1.0f;
+	Com_Memcpy(tmp, vk_world.modelview_transform, 64);
+	Com_Memset(vk_world.modelview_transform, 0, 64);
+	vk_world.modelview_transform[0] = 1.0f;
+	vk_world.modelview_transform[5] = 1.0f;
+	vk_world.modelview_transform[10] = 1.0f;
+	vk_world.modelview_transform[15] = 1.0f;
 	vk_bind_geometry();
 	vk_shade_geometry(vk.shadow_finish_pipeline, false, Vk_Depth_Range::normal);
-	Com_Memcpy(vk_resources.modelview_transform, tmp, 64);
+	Com_Memcpy(vk_world.modelview_transform, tmp, 64);
 	tess.numIndexes = 0;
 	tess.numVertexes = 0;
 }

View File

@@ -724,7 +724,7 @@ void RB_StageIteratorSky( void ) {
 	// draw the outer skybox
 	if ( tess.shader->sky.outerbox[0] && tess.shader->sky.outerbox[0] != tr.defaultImage ) {
 		float modelMatrix_original[16];
-		Com_Memcpy(modelMatrix_original, vk_resources.modelview_transform, sizeof(float[16]));
+		Com_Memcpy(modelMatrix_original, vk_world.modelview_transform, sizeof(float[16]));
 		float skybox_translate[16] = {
 			1, 0, 0, 0,
@@ -732,16 +732,16 @@ void RB_StageIteratorSky( void ) {
 			0, 0, 1, 0,
 			backEnd.viewParms.or.origin[0], backEnd.viewParms.or.origin[1], backEnd.viewParms.or.origin[2], 1
 		};
-		myGlMultMatrix(skybox_translate, modelMatrix_original, vk_resources.modelview_transform);
+		myGlMultMatrix(skybox_translate, modelMatrix_original, vk_world.modelview_transform);
 		GL_State( 0 );
 		qglColor3f( tr.identityLight, tr.identityLight, tr.identityLight );
 		qglPushMatrix ();
-		qglLoadMatrixf(vk_resources.modelview_transform);
+		qglLoadMatrixf(vk_world.modelview_transform);
 		DrawSkyBox( tess.shader );
 		qglPopMatrix();
-		Com_Memcpy(vk_resources.modelview_transform, modelMatrix_original, sizeof(float[16]));
+		Com_Memcpy(vk_world.modelview_transform, modelMatrix_original, sizeof(float[16]));
 	}
 	// generate the vertexes for all the clouds, which will be drawn

View File

@@ -259,16 +259,16 @@ static void allocate_and_bind_image_memory(VkImage image) {
 		ri.Error(ERR_FATAL, "Vulkan: could not allocate memory, image is too large.");
 	}
-	Vk_Resources::Chunk* chunk = nullptr;
+	Vk_World::Chunk* chunk = nullptr;
 	// Try to find an existing chunk of sufficient capacity.
 	const auto mask = ~(memory_requirements.alignment - 1);
-	for (int i = 0; i < vk_resources.num_image_chunks; i++) {
+	for (int i = 0; i < vk_world.num_image_chunks; i++) {
 		// ensure that memory region has proper alignment
-		VkDeviceSize offset = (vk_resources.image_chunks[i].used + memory_requirements.alignment - 1) & mask;
+		VkDeviceSize offset = (vk_world.image_chunks[i].used + memory_requirements.alignment - 1) & mask;
 		if (offset + memory_requirements.size <= IMAGE_CHUNK_SIZE) {
-			chunk = &vk_resources.image_chunks[i];
+			chunk = &vk_world.image_chunks[i];
 			chunk->used = offset + memory_requirements.size;
 			break;
 		}
@@ -276,7 +276,7 @@ static void allocate_and_bind_image_memory(VkImage image) {
 	// Allocate a new chunk in case we couldn't find suitable existing chunk.
 	if (chunk == nullptr) {
-		if (vk_resources.num_image_chunks >= MAX_IMAGE_CHUNKS) {
+		if (vk_world.num_image_chunks >= MAX_IMAGE_CHUNKS) {
 			ri.Error(ERR_FATAL, "Vulkan: image chunk limit has been reached");
 		}
@@ -289,8 +289,8 @@ static void allocate_and_bind_image_memory(VkImage image) {
 		VkDeviceMemory memory;
 		VK_CHECK(vkAllocateMemory(vk.device, &alloc_info, nullptr, &memory));
-		chunk = &vk_resources.image_chunks[vk_resources.num_image_chunks];
-		vk_resources.num_image_chunks++;
+		chunk = &vk_world.image_chunks[vk_world.num_image_chunks];
+		vk_world.num_image_chunks++;
 		chunk->memory = memory;
 		chunk->used = memory_requirements.size;
 	}
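
The bump allocation above relies on the standard power-of-two align-up idiom: VkMemoryRequirements::alignment is guaranteed to be a power of two, so rounding the chunk's used size up to the next aligned offset is a single mask operation. A small self-contained illustration (not engine code):

#include <cstdint>
#include <cassert>

// Round 'offset' up to the next multiple of 'alignment' (alignment must be a power of two).
uint64_t align_up(uint64_t offset, uint64_t alignment) {
	assert((alignment & (alignment - 1)) == 0);
	return (offset + alignment - 1) & ~(alignment - 1);
}

// Example: align_up(300, 256) == 512; the new image would then be bound at offset 512
// within the chunk's VkDeviceMemory, and 'used' advances past offset + size.
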
@@ -299,16 +299,16 @@ static void allocate_and_bind_image_memory(VkImage image) {
 }
 static void ensure_staging_buffer_allocation(VkDeviceSize size) {
-	if (vk_resources.staging_buffer_size >= size)
+	if (vk_world.staging_buffer_size >= size)
 		return;
-	if (vk_resources.staging_buffer != VK_NULL_HANDLE)
-		vkDestroyBuffer(vk.device, vk_resources.staging_buffer, nullptr);
-	if (vk_resources.staging_buffer_memory != VK_NULL_HANDLE)
-		vkFreeMemory(vk.device, vk_resources.staging_buffer_memory, nullptr);
-	vk_resources.staging_buffer_size = size;
+	if (vk_world.staging_buffer != VK_NULL_HANDLE)
+		vkDestroyBuffer(vk.device, vk_world.staging_buffer, nullptr);
+	if (vk_world.staging_buffer_memory != VK_NULL_HANDLE)
+		vkFreeMemory(vk.device, vk_world.staging_buffer_memory, nullptr);
+	vk_world.staging_buffer_size = size;
 	VkBufferCreateInfo buffer_desc;
 	buffer_desc.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
@@ -319,10 +319,10 @@ static void ensure_staging_buffer_allocation(VkDeviceSize size) {
 	buffer_desc.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
 	buffer_desc.queueFamilyIndexCount = 0;
 	buffer_desc.pQueueFamilyIndices = nullptr;
-	VK_CHECK(vkCreateBuffer(vk.device, &buffer_desc, nullptr, &vk_resources.staging_buffer));
+	VK_CHECK(vkCreateBuffer(vk.device, &buffer_desc, nullptr, &vk_world.staging_buffer));
 	VkMemoryRequirements memory_requirements;
-	vkGetBufferMemoryRequirements(vk.device, vk_resources.staging_buffer, &memory_requirements);
+	vkGetBufferMemoryRequirements(vk.device, vk_world.staging_buffer, &memory_requirements);
 	uint32_t memory_type = find_memory_type(vk.physical_device, memory_requirements.memoryTypeBits,
 		VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
@@ -332,12 +332,12 @@ static void ensure_staging_buffer_allocation(VkDeviceSize size) {
 	alloc_info.pNext = nullptr;
 	alloc_info.allocationSize = memory_requirements.size;
 	alloc_info.memoryTypeIndex = memory_type;
-	VK_CHECK(vkAllocateMemory(vk.device, &alloc_info, nullptr, &vk_resources.staging_buffer_memory));
-	VK_CHECK(vkBindBufferMemory(vk.device, vk_resources.staging_buffer, vk_resources.staging_buffer_memory, 0));
+	VK_CHECK(vkAllocateMemory(vk.device, &alloc_info, nullptr, &vk_world.staging_buffer_memory));
+	VK_CHECK(vkBindBufferMemory(vk.device, vk_world.staging_buffer, vk_world.staging_buffer_memory, 0));
 	void* data;
-	VK_CHECK(vkMapMemory(vk.device, vk_resources.staging_buffer_memory, 0, VK_WHOLE_SIZE, 0, &data));
-	vk_resources.staging_buffer_ptr = (byte*)data;
+	VK_CHECK(vkMapMemory(vk.device, vk_world.staging_buffer_memory, 0, VK_WHOLE_SIZE, 0, &data));
+	vk_world.staging_buffer_ptr = (byte*)data;
 }
 static void create_instance() {
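
Worth noting for anyone reusing the staging pattern above: the memory is requested with HOST_VISIBLE | HOST_COHERENT and mapped once with VK_WHOLE_SIZE, so the cached staging_buffer_ptr stays valid until the buffer is reallocated and no explicit flush is required after CPU writes. With a non-coherent memory type, a flush would additionally be needed before the transfer, roughly:

// Sketch: only needed if the staging memory lacked VK_MEMORY_PROPERTY_HOST_COHERENT_BIT.
VkMappedMemoryRange range{};
range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
range.memory = vk_world.staging_buffer_memory;
range.offset = 0;
range.size = VK_WHOLE_SIZE;
VK_CHECK(vkFlushMappedMemoryRanges(vk.device, 1, &range));
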
@@ -739,6 +739,25 @@ void vk_initialize() {
 		}
 	}
+	//
+	// Sync primitives.
+	//
+	{
+		VkSemaphoreCreateInfo desc;
+		desc.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+		desc.pNext = nullptr;
+		desc.flags = 0;
+		VK_CHECK(vkCreateSemaphore(vk.device, &desc, nullptr, &vk.image_acquired));
+		VK_CHECK(vkCreateSemaphore(vk.device, &desc, nullptr, &vk.rendering_finished));
+		VkFenceCreateInfo fence_desc;
+		fence_desc.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+		fence_desc.pNext = nullptr;
+		fence_desc.flags = VK_FENCE_CREATE_SIGNALED_BIT;
+		VK_CHECK(vkCreateFence(vk.device, &fence_desc, nullptr, &vk.rendering_finished_fence));
+	}
 	//
 	// Command pool.
 	//
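
The hunk above only moves the creation of the sync objects earlier in vk_initialize (the matching removal is in the next hunk); the fence is created signaled, which is the usual way to keep the first frame's wait from blocking. For context, these three objects support the standard per-frame handshake, sketched below assuming a single frame in flight; the helper functions and the vk.swapchain member are illustrative, only image_acquired, rendering_finished, rendering_finished_fence, and swapchain_image_index appear in this diff.

// Sketch of a typical frame using the objects created above (not verbatim engine code).
void frame() {
	// Wait for the previous frame's GPU work, then make the fence reusable.
	VK_CHECK(vkWaitForFences(vk.device, 1, &vk.rendering_finished_fence, VK_TRUE, UINT64_MAX));
	VK_CHECK(vkResetFences(vk.device, 1, &vk.rendering_finished_fence));

	// Acquire a swapchain image; the GPU signals vk.image_acquired when it is ready.
	VK_CHECK(vkAcquireNextImageKHR(vk.device, vk.swapchain, UINT64_MAX,
		vk.image_acquired, VK_NULL_HANDLE, &vk.swapchain_image_index));

	record_and_submit_commands();   // submit waits on image_acquired, signals
	                                // rendering_finished and rendering_finished_fence
	present();                      // vkQueuePresentKHR waits on rendering_finished
}
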
@@ -982,25 +1001,6 @@ void vk_initialize() {
 		vk.index_buffer_ptr = (byte*)data + index_buffer_offset;
 	}
-	//
-	// Sync primitives.
-	//
-	{
-		VkSemaphoreCreateInfo desc;
-		desc.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
-		desc.pNext = nullptr;
-		desc.flags = 0;
-		VK_CHECK(vkCreateSemaphore(vk.device, &desc, nullptr, &vk.image_acquired));
-		VK_CHECK(vkCreateSemaphore(vk.device, &desc, nullptr, &vk.rendering_finished));
-		VkFenceCreateInfo fence_desc;
-		fence_desc.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
-		fence_desc.pNext = nullptr;
-		fence_desc.flags = VK_FENCE_CREATE_SIGNALED_BIT;
-		VK_CHECK(vkCreateFence(vk.device, &fence_desc, nullptr, &vk.rendering_finished_fence));
-	}
 	//
 	// Shader modules.
 	//
@@ -1239,27 +1239,26 @@ void vk_shutdown() {
 void vk_release_resources() {
 	vkDeviceWaitIdle(vk.device);
-	auto& res = vk_resources;
-	for (int i = 0; i < res.num_image_chunks; i++)
-		vkFreeMemory(vk.device, res.image_chunks[i].memory, nullptr);
-	if (res.staging_buffer != VK_NULL_HANDLE)
-		vkDestroyBuffer(vk.device, res.staging_buffer, nullptr);
-	if (res.staging_buffer_memory != VK_NULL_HANDLE)
-		vkFreeMemory(vk.device, res.staging_buffer_memory, nullptr);
-	for (int i = 0; i < res.num_samplers; i++)
-		vkDestroySampler(vk.device, res.samplers[i], nullptr);
-	for (int i = 0; i < res.num_pipelines; i++)
-		vkDestroyPipeline(vk.device, res.pipelines[i], nullptr);
-	vk_resources.pipeline_create_time = 0.0f;
+	for (int i = 0; i < vk_world.num_image_chunks; i++)
+		vkFreeMemory(vk.device, vk_world.image_chunks[i].memory, nullptr);
+	if (vk_world.staging_buffer != VK_NULL_HANDLE)
+		vkDestroyBuffer(vk.device, vk_world.staging_buffer, nullptr);
+	if (vk_world.staging_buffer_memory != VK_NULL_HANDLE)
+		vkFreeMemory(vk.device, vk_world.staging_buffer_memory, nullptr);
+	for (int i = 0; i < vk_world.num_samplers; i++)
+		vkDestroySampler(vk.device, vk_world.samplers[i], nullptr);
+	for (int i = 0; i < vk_world.num_pipelines; i++)
+		vkDestroyPipeline(vk.device, vk_world.pipelines[i], nullptr);
+	vk_world.pipeline_create_time = 0.0f;
 	for (int i = 0; i < MAX_VK_IMAGES; i++) {
-		Vk_Image& image = res.images[i];
+		Vk_Image& image = vk_world.images[i];
 		if (image.handle != VK_NULL_HANDLE) {
 			vkDestroyImage(vk.device, image.handle, nullptr);
@@ -1267,7 +1266,7 @@ void vk_release_resources() {
 		}
 	}
-	Com_Memset(&res, 0, sizeof(res));
+	Com_Memset(&vk_world, 0, sizeof(vk_world));
 	VK_CHECK(vkResetDescriptorPool(vk.device, vk.descriptor_pool, 0));
@@ -1355,7 +1354,7 @@ Vk_Image vk_create_image(int width, int height, VkFormat format, int mip_levels,
 		VK_CHECK(vkAllocateDescriptorSets(vk.device, &desc, &image.descriptor_set));
 		vk_update_descriptor_set(image.descriptor_set, image.view, mip_levels > 1, repeat_texture);
-		vk_resources.current_descriptor_sets[glState.currenttmu] = image.descriptor_set;
+		vk_world.current_descriptor_sets[glState.currenttmu] = image.descriptor_set;
 	}
 	return image;
@@ -1395,19 +1394,19 @@ void vk_upload_image_data(VkImage image, int width, int height, bool mipmap, con
 	}
 	ensure_staging_buffer_allocation(buffer_size);
-	Com_Memcpy(vk_resources.staging_buffer_ptr, pixels, buffer_size);
+	Com_Memcpy(vk_world.staging_buffer_ptr, pixels, buffer_size);
 	record_and_run_commands(vk.command_pool, vk.queue,
 		[&image, &num_regions, &regions](VkCommandBuffer command_buffer) {
-		record_buffer_memory_barrier(command_buffer, vk_resources.staging_buffer,
+		record_buffer_memory_barrier(command_buffer, vk_world.staging_buffer,
 			VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
 			VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT);
 		record_image_layout_transition(command_buffer, image, VK_IMAGE_ASPECT_COLOR_BIT,
 			0, VK_IMAGE_LAYOUT_UNDEFINED, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
-		vkCmdCopyBufferToImage(command_buffer, vk_resources.staging_buffer, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, num_regions, regions);
+		vkCmdCopyBufferToImage(command_buffer, vk_world.staging_buffer, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, num_regions, regions);
 		record_image_layout_transition(command_buffer, image, VK_IMAGE_ASPECT_COLOR_BIT,
 			VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
@@ -1788,19 +1787,19 @@ static VkPipeline create_pipeline(const Vk_Pipeline_Def& def) {
 VkSampler vk_find_sampler(const Vk_Sampler_Def& def) {
 	// Look for sampler among existing samplers.
-	for (int i = 0; i < vk_resources.num_samplers; i++) {
-		const auto& cur_def = vk_resources.sampler_defs[i];
+	for (int i = 0; i < vk_world.num_samplers; i++) {
+		const auto& cur_def = vk_world.sampler_defs[i];
 		if (cur_def.repeat_texture == def.repeat_texture &&
 			cur_def.gl_mag_filter == def.gl_mag_filter &&
 			cur_def.gl_min_filter == def.gl_min_filter)
 		{
-			return vk_resources.samplers[i];
+			return vk_world.samplers[i];
 		}
 	}
 	// Create new sampler.
-	if (vk_resources.num_samplers >= MAX_VK_SAMPLERS) {
+	if (vk_world.num_samplers >= MAX_VK_SAMPLERS) {
 		ri.Error(ERR_DROP, "vk_find_sampler: MAX_VK_SAMPLERS hit\n");
 	}
@@ -1865,9 +1864,9 @@ VkSampler vk_find_sampler(const Vk_Sampler_Def& def) {
 	VkSampler sampler;
 	VK_CHECK(vkCreateSampler(vk.device, &desc, nullptr, &sampler));
-	vk_resources.sampler_defs[vk_resources.num_samplers] = def;
-	vk_resources.samplers[vk_resources.num_samplers] = sampler;
-	vk_resources.num_samplers++;
+	vk_world.sampler_defs[vk_world.num_samplers] = def;
+	vk_world.samplers[vk_world.num_samplers] = sampler;
+	vk_world.num_samplers++;
 	return sampler;
 }
@@ -1884,8 +1883,8 @@ struct Timer {
 };
 VkPipeline vk_find_pipeline(const Vk_Pipeline_Def& def) {
-	for (int i = 0; i < vk_resources.num_pipelines; i++) {
-		const auto& cur_def = vk_resources.pipeline_defs[i];
+	for (int i = 0; i < vk_world.num_pipelines; i++) {
+		const auto& cur_def = vk_world.pipeline_defs[i];
 		if (cur_def.shader_type == def.shader_type &&
 			cur_def.state_bits == def.state_bits &&
@@ -1896,21 +1895,21 @@ VkPipeline vk_find_pipeline(const Vk_Pipeline_Def& def) {
 			cur_def.line_primitives == def.line_primitives &&
 			cur_def.shadow_phase == def.shadow_phase)
 		{
-			return vk_resources.pipelines[i];
+			return vk_world.pipelines[i];
 		}
 	}
-	if (vk_resources.num_pipelines >= MAX_VK_PIPELINES) {
+	if (vk_world.num_pipelines >= MAX_VK_PIPELINES) {
 		ri.Error(ERR_DROP, "vk_find_pipeline: MAX_VK_PIPELINES hit\n");
 	}
 	Timer t;
 	VkPipeline pipeline = create_pipeline(def);
-	vk_resources.pipeline_create_time += t.elapsed_seconds();
-	vk_resources.pipeline_defs[vk_resources.num_pipelines] = def;
-	vk_resources.pipelines[vk_resources.num_pipelines] = pipeline;
-	vk_resources.num_pipelines++;
+	vk_world.pipeline_create_time += t.elapsed_seconds();
+	vk_world.pipeline_defs[vk_world.num_pipelines] = def;
+	vk_world.pipelines[vk_world.num_pipelines] = pipeline;
+	vk_world.num_pipelines++;
 	return pipeline;
 }
@@ -2050,7 +2049,7 @@ static void get_mvp_transform(float* mvp) {
 			p[12], p[13], P14, p[15]
 		};
-		myGlMultMatrix(vk_resources.modelview_transform, proj, mvp);
+		myGlMultMatrix(vk_world.modelview_transform, proj, mvp);
 	}
 }
@@ -2164,7 +2163,7 @@ void vk_shade_geometry(VkPipeline pipeline, bool multitexture, Vk_Depth_Range de
 	// bind descriptor sets
 	uint32_t set_count = multitexture ? 2 : 1;
-	vkCmdBindDescriptorSets(vk.command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vk.pipeline_layout, 0, set_count, vk_resources.current_descriptor_sets, 0, nullptr);
+	vkCmdBindDescriptorSets(vk.command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vk.pipeline_layout, 0, set_count, vk_world.current_descriptor_sets, 0, nullptr);
 	// bind pipeline
 	vkCmdBindPipeline(vk.command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
@@ -2186,7 +2185,7 @@ void vk_shade_geometry(VkPipeline pipeline, bool multitexture, Vk_Depth_Range de
 	else
 		vkCmdDraw(vk.command_buffer, tess.numVertexes, 1, 0, 0);
-	vk_resources.dirty_depth_attachment = true;
+	vk_world.dirty_depth_attachment = true;
 }
 void vk_begin_frame() {
@@ -2233,7 +2232,7 @@ void vk_begin_frame() {
 	vkCmdBeginRenderPass(vk.command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);
-	vk_resources.dirty_depth_attachment = false;
+	vk_world.dirty_depth_attachment = false;
 	vk.xyz_elements = 0;
 	vk.color_st_elements = 0;
 	vk.index_buffer_offset = 0;

View File

@@ -104,7 +104,8 @@ void vk_end_frame();
 void vk_read_pixels(byte* buffer);
-// Vulkan specific structures used by the engine.
+// Vk_Instance contains engine-specific vulkan resources that persist entire renderer lifetime.
+// This structure is initialized/deinitialized by vk_initialize/vk_shutdown functions correspondingly.
 struct Vk_Instance {
 	bool active = false;
 	VkInstance instance = VK_NULL_HANDLE;
@@ -120,6 +121,11 @@ struct Vk_Instance {
 	uint32_t swapchain_image_count = 0;
 	VkImage swapchain_images[MAX_SWAPCHAIN_IMAGES];
 	VkImageView swapchain_image_views[MAX_SWAPCHAIN_IMAGES];
+	uint32_t swapchain_image_index = -1;
+	VkSemaphore image_acquired = VK_NULL_HANDLE;
+	VkSemaphore rendering_finished = VK_NULL_HANDLE;
+	VkFence rendering_finished_fence = VK_NULL_HANDLE;
 	VkCommandPool command_pool = VK_NULL_HANDLE;
 	VkCommandBuffer command_buffer = VK_NULL_HANDLE;
@@ -147,12 +153,9 @@ struct Vk_Instance {
 	// host visible memory that holds both vertex and index data
 	VkDeviceMemory geometry_buffer_memory = VK_NULL_HANDLE;
-	VkSemaphore image_acquired = VK_NULL_HANDLE;
-	uint32_t swapchain_image_index = -1;
-	VkSemaphore rendering_finished = VK_NULL_HANDLE;
-	VkFence rendering_finished_fence = VK_NULL_HANDLE;
+	//
+	// Shader modules.
+	//
 	VkShaderModule single_texture_vs = VK_NULL_HANDLE;
 	VkShaderModule single_texture_clipping_plane_vs = VK_NULL_HANDLE;
 	VkShaderModule single_texture_fs = VK_NULL_HANDLE;
@@ -161,6 +164,9 @@ struct Vk_Instance {
 	VkShaderModule multi_texture_mul_fs = VK_NULL_HANDLE;
 	VkShaderModule multi_texture_add_fs = VK_NULL_HANDLE;
+	//
+	// Standard pipelines.
+	//
 	VkPipeline skybox_pipeline = VK_NULL_HANDLE;
 	// dim 0: 0 - front side, 1 - back size
@@ -178,6 +184,7 @@ struct Vk_Instance {
 	// dim 2 is a polygon offset value (0 - off, 1 - on).
 	VkPipeline dlight_pipelines[2][3][2];
+	// debug visualization pipelines
 	VkPipeline tris_debug_pipeline;
 	VkPipeline tris_mirror_debug_pipeline;
 	VkPipeline normals_debug_pipeline;
@@ -186,7 +193,11 @@ struct Vk_Instance {
 	VkPipeline images_debug_pipeline;
 };
-struct Vk_Resources {
+//
+// Vk_World contains vulkan resources/state requested by the game code.
+// It is reinitialized on a map change.
+//
+struct Vk_World {
 	//
 	// Resources.
 	//