Get the program binary type correctly; otherwise ATI generates massive (~400MB) shader caches. Also remove the line in PixelShaderGen that is no longer needed.

This commit is contained in:
Ryan Houdek 2011-12-29 01:35:50 -06:00 committed by Sonicadvance1
parent 2e15440896
commit 03b09bed5d
3 changed files with 19 additions and 19 deletions

View file

@ -577,8 +577,6 @@ const char *GeneratePixelShaderCode(DSTALPHA_MODE dstAlphaMode, API_TYPE ApiType
if (g_ActiveConfig.backend_info.bSupportsGLSLATTRBind)
WRITE(p, "#extension GL_ARB_explicit_attrib_location : enable\n");
if (g_ActiveConfig.backend_info.bSupportsGLSLBlend)
WRITE(p, "#extension GL_ARB_blend_func_extended : enable\n");
// Silly differences
WRITE(p, "#define float2 vec2\n");
WRITE(p, "#define float3 vec3\n");