2015-12-03 02:36:18 +01:00
|
|
|
import os
|
2017-03-15 12:30:14 +01:00
|
|
|
import arm.utils
|
2017-12-20 15:37:58 +01:00
|
|
|
import arm.assets as assets
|
2015-12-03 02:36:18 +01:00
|
|
|
|
2018-09-13 12:13:32 +02:00
|
|
|
def parse_context(c, sres, asset, defs, vert=None, frag=None):
    """Parse one shader context description `c` into `sres['contexts']`.

    Registers the context's shader names in `asset`, copies render-state
    parameters, then scans each shader's GLSL source to collect vertex
    elements, constants and texture units (via parse_shader).

    `vert`/`frag` may be passed as pre-split source lines to skip reading
    the shader files from disk (used for generated define-variants).
    """
    con = {}
    sres['contexts'].append(con)
    con['name'] = c['name']
    con['constants'] = []
    con['texture_units'] = []
    con['vertex_elements'] = []

    # Names: strip extension and directory, register each shader as an
    # asset once. Geometry/tessellation stages are optional.
    for key in ('vertex_shader', 'fragment_shader', 'geometry_shader',
                'tesscontrol_shader', 'tesseval_shader'):
        if key not in c:
            continue
        con[key] = c[key].rsplit('.', 1)[0].split('/')[-1]
        if con[key] not in asset:
            asset.append(con[key])

    if 'color_attachments' in c:
        con['color_attachments'] = c['color_attachments']
        # Resolve the '_HDR' placeholder against the active defines
        for i in range(len(con['color_attachments'])):
            if con['color_attachments'][i] == '_HDR':
                con['color_attachments'][i] = 'RGBA32' if '_LDR' in defs else 'RGBA64'

    # Params: copy render-state settings when present.
    # Bug fix: the original list was missing a comma after
    # 'alpha_blend_operation', so implicit string concatenation fused it
    # with 'color_writes_red' and neither parameter was ever copied.
    params = ['depth_write', 'compare_mode', 'cull_mode',
              'blend_source', 'blend_destination', 'blend_operation',
              'alpha_blend_source', 'alpha_blend_destination', 'alpha_blend_operation',
              'color_writes_red', 'color_writes_green', 'color_writes_blue',
              'color_writes_alpha', 'conservative_raster']
    for p in params:
        if p in c:
            con[p] = c[p]

    # Parse shaders
    if vert is None:
        with open(c['vertex_shader']) as f:
            vert = f.read().splitlines()
    parse_shader(sres, c, con, defs, vert, True)  # Parse attribs for vertex shader

    if frag is None:
        with open(c['fragment_shader']) as f:
            frag = f.read().splitlines()
    parse_shader(sres, c, con, defs, frag, False)

    # Optional stages are always read from disk
    for key in ('geometry_shader', 'tesscontrol_shader', 'tesseval_shader'):
        if key in c:
            with open(c[key]) as f:
                parse_shader(sres, c, con, defs, f.read().splitlines(), False)
|
2016-01-28 00:58:00 +01:00
|
|
|
|
2016-09-28 00:00:59 +02:00
|
|
|
def _uniform_link_valid(l, defs):
    """Return True if shader link `l` applies under the active `defs`.

    A link may carry 'ifdef' (at least one listed define must be set)
    and/or 'ifndef' (none of the listed defines may be set) guards.
    """
    if 'ifdef' in l and not any(d in l['ifdef'] for d in defs):
        return False
    if 'ifndef' in l and any(d in l['ifndef'] for d in defs):
        return False
    return True


def parse_shader(sres, c, con, defs, lines, parse_attributes):
    """Scan GLSL source `lines` and fill `con['vertex_elements']`,
    `con['constants']` and `con['texture_units']`.

    Only the branches of #ifdef/#ifndef/#else/#endif that are active
    under `defs` are scanned. When `parse_attributes` is True, the
    leading block of 'in' declarations is recorded as vertex elements.
    Uniform declarations become texture units (sampler*/image*/uimage*)
    or constants, optionally wired to engine links from c['links'].
    """
    # NOTE: the previous revision kept unused `skip_till_endif` /
    # `skip_else` locals left over from pre-stack preprocessor handling;
    # they are removed here.
    vertex_elements_parsed = not parse_attributes
    vertex_elements_parsing = False
    # One entry per open #if block: 1 = branch active, 0 = skipped
    stack = []

    for line in lines:
        line = line.lstrip()

        # Minimal preprocessor evaluation
        if line.startswith('#if'):  # if, ifdef, ifndef
            s = line.split(' ')[1]
            found = s in defs
            if line.startswith('#ifndef'):
                found = not found
            stack.append(1 if found else 0)
            continue
        if line.startswith('#else'):
            stack[-1] = 1 - stack[-1]
            continue
        if line.startswith('#endif'):
            stack.pop()
            continue

        # Skip anything inside an inactive preprocessor branch
        if 0 in stack:
            continue

        # Vertex attributes: leading 'in' declarations only
        if vertex_elements_parsed == False and line.startswith('in '):
            vertex_elements_parsing = True
            s = line.split(' ')
            vd = {'data': 'float' + s[1][-1:],  # vecN -> floatN
                  'name': s[2][:-1]}            # strip trailing ';'
            con['vertex_elements'].append(vd)
        # Any other non-comment line after the 'in' block ends attribute parsing
        if vertex_elements_parsing and len(line) > 0 and \
                not line.startswith('//') and not line.startswith('in '):
            vertex_elements_parsed = True

        if line.startswith('uniform ') or line.startswith('//!uniform'):  # Uniforms included from header files
            s = line.split(' ')
            # uniform sampler2D myname;
            # uniform layout(RGBA8) image3D myname;
            if s[1].startswith('layout'):
                ctype = s[2]
                cid = s[3]
            else:
                ctype = s[1]
                cid = s[2]
            if cid[-1] == ';':
                cid = cid[:-1]

            if ctype.startswith('sampler') or ctype.startswith('image') or ctype.startswith('uimage'):
                # Texture unit; skip if already present
                if any(tu['name'] == cid for tu in con['texture_units']):
                    continue
                if cid[-1] == ']':  # Array of samplers - sampler2D mySamplers[2]
                    # Add individual units - mySamplers[0], mySamplers[1]
                    # NOTE(review): reads cid[-2], so single-digit sizes only
                    for i in range(int(cid[-2])):
                        tu = {'name': cid[:-2] + str(i) + ']'}
                        con['texture_units'].append(tu)
                else:
                    tu = {'name': cid}
                    con['texture_units'].append(tu)
                if ctype.startswith('image') or ctype.startswith('uimage'):
                    # For arrays this tags only the last created unit,
                    # matching the original behavior
                    tu['is_image'] = True
                # Check for link (first matching and valid entry wins)
                for l in c['links']:
                    if l['name'] == cid and _uniform_link_valid(l, defs):
                        tu['link'] = l['link']
                        break
            else:  # Constant
                if cid.find('[') != -1:  # Float arrays
                    cid = cid.split('[')[0]
                    ctype = 'floats'
                # Skip if already present
                if any(const['name'] == cid for const in con['constants']):
                    continue
                const = {'type': ctype, 'name': cid}
                con['constants'].append(const)
                # Check for link (first matching and valid entry wins)
                for l in c['links']:
                    if l['name'] == cid and _uniform_link_valid(l, defs):
                        const['link'] = l['link']
                        break
|
2015-12-17 20:07:23 +01:00
|
|
|
|
2019-01-27 18:56:04 +01:00
|
|
|
def make(res, base_name, json_data, fp, defs, make_variants):
    """Build a shader-data entry for `base_name` and append it to
    `res['shader_datas']`.

    When `make_variants` is requested and the json declares variants
    whose first define is active in `defs`, a copy of the first context
    is compiled with that define baked into the shader sources and
    registered as an extra context. All declared contexts are then
    parsed normally.
    """
    sres = {'name': base_name, 'contexts': []}
    res['shader_datas'].append(sres)
    asset = assets.shader_passes_assets[base_name]

    vert = None
    frag = None
    has_variants = 'variants' in json_data and len(json_data['variants']) > 0
    if make_variants and has_variants:
        d = json_data['variants'][0]
        if d in defs:
            # Write shader variant with define
            c = json_data['contexts'][0]

            with open(c['vertex_shader']) as f:
                body = f.read().split('\n', 1)[1]
            vert = "#version 450\n#define " + d + "\n" + body

            with open(c['fragment_shader']) as f:
                body = f.read().split('\n', 1)[1]
            frag = "#version 450\n#define " + d + "\n" + body

            out_base = arm.utils.get_fp_build() + '/compiled/Shaders/' + base_name + d
            with open(out_base + '.vert.glsl', 'w') as f:
                f.write(vert)
            with open(out_base + '.frag.glsl', 'w') as f:
                f.write(frag)

            # Add context variant pointing at the generated sources
            c2 = c.copy()
            c2['vertex_shader'] = base_name + d + '.vert.glsl'
            c2['fragment_shader'] = base_name + d + '.frag.glsl'
            c2['name'] = c['name'] + d
            parse_context(c2, sres, asset, defs, vert.splitlines(), frag.splitlines())

    for c in json_data['contexts']:
        parse_context(c, sres, asset, defs)