print( "Convexer reload" )
#from mathutils import *
-import bpy, gpu, math, os, time, mathutils, blf
+import bpy, gpu, math, os, time, mathutils, blf, subprocess, shutil, hashlib
from ctypes import *
from gpu_extras.batch import batch_for_shader
from bpy.app.handlers import persistent
blf.position(0,ji[0]*w,35,0)
blf.size(0,50,20)
blf.draw(0,ji[1])
+
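+ # Overlay the tail of the compiler log: newest line at y=80, older lines stacked above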
+ py = 80
+ blf.size(0,50,16)
+ for ln in reversed(CXR_COMPILER_CHAIN.LOG[-25:]):
+ blf.position(0,2,py,0)
+ blf.draw(0,ln[:-1])
+ py += 16
if CXR_PREVIEW_OPERATOR.LASTERR != None:
blf.position(0,2,80,0)
# Standard entity functions, think of like base.fgd
#
-def cxr_get_origin(obj,context):
- return obj.location * context['scale'] + mathutils.Vector(context['offset'])
+def cxr_get_origin(context):
+ return context['object'].location * context['transform']['scale'] + \
+ mathutils.Vector(context['transform']['offset'])
-def cxr_get_angles(obj,context):
+def cxr_get_angles(context):
+ obj = context['object']
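+ # rotation_euler is in radians; 57.295779513 (180/pi) converts to degrees for Source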
euler = [ a*57.295779513 for a in obj.rotation_euler ]
angle = [0,0,0]
angle[0] = euler[1]
# EEVEE Light component converter -> Source 1
#
-def ent_lights(obj,context):
+def ent_lights(context):
+ obj = context['object']
kvs = cxr_baseclass([ent_origin],\
{
"_distance": (0.0 if obj.data.cxr_data.realtime else -1.0),
return kvs
-def ent_cubemap(obj,context):
+def ent_prop(context):
+ kvs = {}
+ if isinstance( context['object'], bpy.types.Collection ):
+ kvs['angles'] = [0,180,0]
+ kvs['enablelightbounce'] = 1
+ kvs['disableshadows'] = 0
+ kvs['fademindist'] = -1
+ kvs['fadescale'] = 1
+ kvs['model'] = F"{asset_path('models',context['object'])}.mdl".lower()
+ kvs['renderamt'] = 255
+ kvs['rendercolor'] = [255, 255, 255]
+ kvs['skin'] = 0
+ kvs['solid'] = 6
+ kvs['uniformscale'] = 1.0
+
+ pos = mathutils.Vector(context['origin'])
+ pos += mathutils.Vector(context['transform']['offset'])
+
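+ # Swizzle back into map space; inverse of the swizzle applied in cxr_collection_center()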
+ kvs['origin'] = [pos[1],-pos[0],pos[2]]
+
+ return kvs
+
+def ent_cubemap(context):
+ obj = context['object']
return cxr_baseclass([ent_origin], {"cubemapsize": obj.data.cxr_data.size})
ent_origin = { "origin": cxr_get_origin }
return asset
# Create a unique ID string
- base = "ABCDEFGHIJKLMNOPQRSTUV"
+ base = "bopshei"
v = asset.cxr_data.asset_id
name = ""
#
# Error: None
#
-def cxr_entity_keyvalues(obj,context,classname):
+def cxr_entity_keyvalues(context):
+ classname = context['classname']
+ obj = context['object']
if classname not in cxr_entities: return None
result = []
entdef = cxr_entities[classname]
kvs = entdef['keyvalues']
- if callable(kvs): kvs = kvs(obj, context)
+ if callable(kvs): kvs = kvs(context)
for k in kvs:
kv = kvs[k]
value = obj[ F"cxrkv_{k}" ]
else:
if callable(kv):
- value = kv(obj,context)
+ value = kv(context)
if isinstance(value,mathutils.Vector):
value = [_ for _ in value]
return info
+def vec3_min( a, b ):
+ return mathutils.Vector((min(a[0],b[0]),min(a[1],b[1]),min(a[2],b[2])))
+def vec3_max( a, b ):
+ return mathutils.Vector((max(a[0],b[0]),max(a[1],b[1]),max(a[2],b[2])))
+
+def cxr_collection_center(collection, transform):
+ BIG=999999999
+ bounds_min = mathutils.Vector((BIG,BIG,BIG))
+ bounds_max = mathutils.Vector((-BIG,-BIG,-BIG))
+
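+ # Accumulate a world-space bounding box over every mesh in the collection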
+ for obj in collection.objects:
+ if obj.type == 'MESH':
+ corners = [ mathutils.Vector(c) for c in obj.bound_box ]
+
+ for corner in [ obj.matrix_world@c for c in corners ]:
+ bounds_min = vec3_min( bounds_min, corner )
+ bounds_max = vec3_max( bounds_max, corner )
+
+ center = (bounds_min + bounds_max) / 2.0
+
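+ # Swizzle the centre into the convention expected by ent_prop, then scale into map units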
+ origin = mathutils.Vector((-center[1],center[0],center[2]))
+ origin *= transform['scale']
+
+ return origin
+
# Prepares Scene into dictionary format
#
def cxr_scene_collect():
if collection.hide_render: return
if collection.name.startswith('mdl_'):
+ sceneinfo['entities'] += [{
+ "object": collection,
+ "classname": "prop_static",
+ "transform": transform,
+ "origin": cxr_collection_center( collection, transform )
+ }]
+
sceneinfo['heros'] += [{
"collection": collection,
- "transform": transform
+ "transform": transform,
+ "origin": cxr_collection_center( collection, transform )
}]
return
}]
for c in collection.children:
- cxr_scene_collect( c, transform )
+ _collect( c, transform )
transform_main = {
"scale": context.scene.cxr_data.scale_factor,
# Write VMF out to file (JOB HANDLER)
#
-def cxr_export_vmf(sceneinfo):
- cxr_reset_lines()
-
- # Setup output and state
- filepath = bpy.data.filepath
- directory = os.path.dirname(filepath)
- settings = bpy.context.scene.cxr_data
-
- asset_dir = F"{directory}/bin"
- material_dir = F"{settings.subdir}/materials/{settings.project_name}"
- model_dir = F"{settings.subdir}/models/{settings.project_name}"
-
- os.makedirs( asset_dir, exist_ok=True )
- os.makedirs( material_dir, exist_ok=True )
- os.makedirs( model_dir, exist_ok=True )
-
- # States
+def cxr_export_vmf(sceneinfo, output_vmf):
cxr_reset_lines()
- output_vmf = F"{directory}/{settings.project_name}.vmf"
with vdf_structure(output_vmf) as m:
print( F"Write: {output_vmf}" )
m.node( 'entity' )
m.kv( 'classname', cls )
- kvs = cxr_entity_keyvalues( obj, ctx, cls )
+ kvs = cxr_entity_keyvalues( ent )
for kv in kvs:
if isinstance(kv[2], list):
m.kv( kv[0], ' '.join([str(_) for _ in kv[2]]) )
else: m.kv( kv[0], str(kv[2]) )
- if obj.type == 'MESH':
- if not _buildsolid( ent ):
- cxr_batch_lines()
- scene_redraw()
- return False
+ if not isinstance( obj, bpy.types.Collection ):
+ if obj.type == 'MESH':
+ if not _buildsolid( ent ):
+ cxr_batch_lines()
+ scene_redraw()
+ return False
m.edon()
# job handler.
#
def compile_material(mat):
- print( F"Compile {asset_full_path('materials',mat)}.vmt" )
-
info = material_info(mat)
properties = mat.cxr_data
+ print( F"Compile {asset_full_path('materials',mat)}.vmt" )
+ if properties.shader == 'Builtin':
+ return []
+
props = []
# Walk the property tree
vmt.edon()
return props
+def cxr_export_modelsrc( mdl, origin, asset_dir, project_name, transform ):
+ dgraph = bpy.context.evaluated_depsgraph_get()
+
+ # Compute hash value
+ chash = asset_uid(mdl)+str(origin)+str(transform)
+
+ #for obj in mdl.objects:
+ # if obj.type != 'MESH':
+ # continue
+
+ # ev = obj.evaluated_get(dgraph).data
+ # srcverts=[(v.co[0],v.co[1],v.co[2]) for v in ev.vertices]
+ # srcloops=[(l.normal[0],l.normal[1],l.normal[2]) for l in ev.loops]
+
+ # chash=hashlib.sha224((str(srcverts)+chash).encode()).hexdigest()
+ # chash=hashlib.sha224((str(srcloops)+chash).encode()).hexdigest()
+
+ # if ev.uv_layers.active != None:
+ # uv_layer = ev.uv_layers.active.data
+ # srcuv=[(uv.uv[0],uv.uv[1]) for uv in uv_layer]
+ # else:
+ # srcuv=['none']
+
+ # chash=hashlib.sha224((str(srcuv)+chash).encode()).hexdigest()
+ # srcmats=[ ms.material.name for ms in obj.material_slots ]
+ # chash=hashlib.sha224((str(srcmats)+chash).encode()).hexdigest()
+ # transforms=[ obj.location, obj.rotation_euler, obj.scale ]
+ # srctr=[(v[0],v[1],v[2]) for v in transforms]
+ # chash=hashlib.sha224((str(srctr)+chash).encode()).hexdigest()
+
+ #if chash != mdl.cxr_data.last_hash:
+ # mdl.cxr_data.last_hash = chash
+ # print( F"Compile: {mdl.name}" )
+ #else:
+ # return True
+
+ bpy.ops.object.select_all(action='DESELECT')
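+ # Clear the selection; only this collection's objects should end up in the FBX (use_selection=True below)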
+
+ # Get viewlayer
+ def _get_layer(col,name):
+ for c in col.children:
+ if c.name == name:
+ return c
+ sub = _get_layer(c,name)
+ if sub != None:
+ return sub
+ return None
+ layer = _get_layer(bpy.context.view_layer.layer_collection,mdl.name)
+
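+ # Un-hide the collection's view layer so its objects can be selected for export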
+ prev_state = layer.hide_viewport
+ layer.hide_viewport=False
+
+ # Collect materials to be compiled, and temp rename for export
+ mat_dict = {}
+
+ for obj in mdl.objects:
+ obj.select_set(state=True)
+ for ms in obj.material_slots:
+ if ms.material != None:
+ if ms.material not in mat_dict:
+ mat_dict[ms.material] = ms.material.name
+ ms.material.name = asset_uid(ms.material)
+ ms.material.use_nodes = False
+
+ uid=asset_uid(mdl)
+ bpy.ops.export_scene.fbx( filepath=F'{asset_dir}/{uid}_ref.fbx',\
+ check_existing=False,
+ use_selection=True,
+ apply_unit_scale=False,
+ bake_space_transform=False
+ )
+
+ # Fix material names back to original
+ for mat in mat_dict:
+ mat.name = mat_dict[mat]
+ mat.use_nodes = True
+
+ layer.hide_viewport=prev_state
+
+ # Write out QC file
+ with open(F'{asset_dir}/{uid}.qc','w') as o:
+ o.write(F'$modelname "{project_name}/{uid}"\n')
+ #o.write(F'$scale .32\n')
+ o.write(F'$scale {transform["scale"]/100.0}\n')
+ o.write(F'$body _ "{uid}_ref.fbx"\n')
+ o.write(F'$staticprop\n')
+ o.write(F'$origin {origin[0]} {origin[1]} {origin[2]}\n')
+
+ #TODO: vphys
+ o.write(F'$cdmaterials {project_name}\n')
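+ # studiomdl requires at least one sequence, even for a $staticprop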
+ o.write(F'$sequence idle {uid}_ref.fbx\n')
+
+ return True
+#
+# Light-patch the compiled BSP in place, then copy it into the game's maps folder
+#
+def cxr_patchmap( src, dst ):
+ libcxr_lightpatch_bsp.call( src.encode('utf-8') )
+ shutil.copyfile( src, dst )
+ return True
+
# Convexer operators
# ------------------------------------------------------------------------------
TIMER = None
TIMER_LAST = 0.0
WAIT_REDRAW = False
+ FILE = None
+ LOG = []
JOBINFO = None
JOBID = 0
+ JOBSYS = None
def cancel(_,context):
+ global cxr_jobs_batch
static = _.__class__
wm = context.window_manager
if static.TIMER != None:
wm.event_timer_remove( static.TIMER )
static.TIMER = None
+
+ static.FILE.close()
+ cxr_jobs_batch = None
scene_redraw()
return {'FINISHED'}
static = _.__class__
if ev.type == 'TIMER':
+ global cxr_jobs_batch
+
if static.WAIT_REDRAW:
+ scene_redraw()
return {'PASS_THROUGH'}
static.WAIT_REDRAW = True
if static.SUBPROC != None:
# Deal with async modes
- pass
+ status = static.SUBPROC.poll()
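+ # poll() returns None while the process is still running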
+ if status == None:
+
+ # Cannot redirect STDOUT through here without causing
+ # undefined behaviour due to the Blender Python specification.
+ #
+ # Have to write it out to a file and read it back in.
+ #
+ with open("/tmp/convexer_compile_log.txt","r") as log:
+ static.LOG = log.readlines()
+ return {'PASS_THROUGH'}
+ else:
+ #for l in static.SUBPROC.stdout:
+ # print( F'-> {l.decode("utf-8")}',end='' )
+ static.SUBPROC = None
+
+ if status != 0:
+ print(F'Compiler ({static.JOBSYS["title"]}) error: {status}')
+ return _.cancel(context)
+
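+ # Mark this job done; the next timer tick will dispatch the following one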
+ static.JOBSYS['jobs'][static.JOBID] = None
+ cxr_jobs_update_graph( static.JOBINFO )
+ scene_redraw()
+ return {'PASS_THROUGH'}
# Compile syncronous thing
for sys in static.JOBINFO:
for i,target in enumerate(sys['jobs']):
if target != None:
- print( F"Start job: {static.JOBID} @{time.time()}" )
-
- if not sys['exec'](target):
- print( "Job failed" )
- return _.cancel(context)
- sys['jobs'][i] = None
- static.JOBID += 1
+ if callable(sys['exec']):
+ print( F"Run (sync): {static.JOBID} @{time.time()}" )
+
+ if not sys['exec'](*target):
+ print( "Job failed" )
+ return _.cancel(context)
+
+ sys['jobs'][i] = None
+ static.JOBID += 1
+ else:
+ # Run external executable (wine)
+ static.SUBPROC = subprocess.Popen( target,
+ stdout=static.FILE,\
+ stderr=subprocess.PIPE,\
+ cwd=sys['cwd'])
+ static.JOBSYS = sys
+ static.JOBID = i
cxr_jobs_update_graph( static.JOBINFO )
scene_redraw()
# All completed
print( "All jobs completed!" )
- global cxr_jobs_batch
cxr_jobs_batch = None
scene_redraw()
print("Launching compiler toolchain")
# Run static compilation units now (collect, vmt..)
+ filepath = bpy.data.filepath
+ directory = os.path.dirname(filepath)
+ settings = bpy.context.scene.cxr_data
+
+ asset_dir = F"{directory}/modelsrc"
+ material_dir = F"{settings.subdir}/materials/{settings.project_name}"
+ model_dir = F"{settings.subdir}/models/{settings.project_name}"
+ output_vmf = F"{directory}/{settings.project_name}.vmf"
+
+ os.makedirs( asset_dir, exist_ok=True )
+ os.makedirs( material_dir, exist_ok=True )
+ os.makedirs( model_dir, exist_ok=True )
+
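+ # External (wine) jobs have their stdout redirected to this file; the timer tails it for the overlay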
+ static.FILE = open("/tmp/convexer_compile_log.txt","w")
+ static.LOG = []
+
sceneinfo = cxr_scene_collect()
image_jobs = []
+ qc_jobs = []
# Collect materials
a_materials = set()
for ms in brush['object'].material_slots:
a_materials.add( ms.material )
+ for ent in sceneinfo['entities']:
+ if isinstance(ent['object'],bpy.types.Collection): continue
+
+ if ent['object'].type == 'MESH':
+ for ms in ent['object'].material_slots:
+ a_materials.add( ms.material )
+
+ # TODO.. this should just be in the entity loop
+ for hero in sceneinfo['heros']:
+ uid = asset_uid(hero['collection'])
+ qc_jobs += [F'{uid}.qc']
+ for obj in hero['collection'].objects:
+ for ms in obj.material_slots:
+ a_materials.add( ms.material )
+
# Collect images
for mat in a_materials:
for pair in compile_material(mat):
flags = 0
if 'flags' in pdef: flags = pdef['flags']
if prop not in image_jobs:
- image_jobs += [prop]
+ image_jobs += [(prop,)]
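+ # (each job is an argument tuple, unpacked with *target by the synchronous runner)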
prop.cxr_data.flags = flags
# Convexer jobs
static.JOBINFO += [{
"title": "Convexer",
- "w": 40,
+ "w": 20,
"colour": (1.0,0.3,0.1,1.0),
"exec": cxr_export_vmf,
- "jobs": [sceneinfo]
+ "jobs": [(sceneinfo,output_vmf)]
}]
if len(image_jobs) > 0:
"exec": compile_image,
"jobs": image_jobs
}]
+
+ # FBX stage
+
+ if len(sceneinfo['heros']) > 0:
+ static.JOBINFO += [{
+ "title": "Batches",
+ "w": 25,
+ "colour": (0.5,0.5,1.0,1.0),
+ "exec": cxr_export_modelsrc,
+ "jobs": [(h['collection'], h['origin'], asset_dir, \
+ settings.project_name, h['transform']) for h in \
+ sceneinfo['heros']]
+ }]
+ # VBSP stage
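+ # wine maps its Z: drive to the unix filesystem root, so give the tools a windows-style game path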
+ game = 'z:'+settings.subdir.replace('/','\\')
+ args = [ \
+ '-game', game, settings.project_name
+ ]
+
+ if len(qc_jobs) > 0:
+ static.JOBINFO += [{
+ "title": "StudioMDL",
+ "w": 20,
+ "colour": (0.8,0.1,0.1,1.0),
+ "exec": "studiomdl",
+ "jobs": [[settings[F'exe_studiomdl']] + [\
+ '-nop4', '-game', game, qc] for qc in qc_jobs],
+ "cwd": asset_dir
+ }]
+
+ static.JOBINFO += [{
+ "title": "VBSP",
+ "w": 25,
+ "colour": (0.1,0.2,1.0,1.0),
+ "exec": "vbsp",
+ "jobs": [[settings[F'exe_vbsp']] + args],
+ "cwd": directory
+ }]
+
+ static.JOBINFO += [{
+ "title": "VVIS",
+ "w": 25,
+ "colour": (0.9,0.5,0.5,1.0),
+ "exec": "vvis",
+ "jobs": [[settings[F'exe_vvis']] + args],
+ "cwd": directory
+ }]
+
+ static.JOBINFO += [{
+ "title": "VRAD",
+ "w": 25,
+ "colour": (0.9,0.2,0.3,1.0),
+ "exec": "vrad",
+ "jobs": [[settings[F'exe_vrad']] + args],
+ "cwd": directory
+ }]
+
+ static.JOBINFO += [{
+ "title": "CXR",
+ "w": 5,
+ "colour": (0.0,1.0,0.4,1.0),
+ "exec": cxr_patchmap,
+ "jobs": [(F"{directory}/{settings.project_name}.bsp",\
+ F"{settings.subdir}/maps/{settings.project_name}.bsp")]
+ }]
+
static.USER_EXIT=False
static.TIMER=wm.event_timer_add(0.1,window=context.window)
wm.modal_handler_add(_)
static.USER_EXIT=True
return {'RUNNING_MODAL'}
+class CXR_RESET_HASHES(bpy.types.Operator):
+ bl_idname="convexer.hash_reset"
+ bl_label="Reset asset hashes"
+
+ def execute(_,context):
+ for c in bpy.data.collections:
+ c.cxr_data.last_hash = F"<RESET>{time.time()}"
+ c.cxr_data.asset_id=0
+
+ for t in bpy.data.images:
+ t.cxr_data.last_hash = F"<RESET>{time.time()}"
+ t.cxr_data.asset_id=0
+
+ return {'FINISHED'}
+
# Convexer panels
# ------------------------------------------------------------------------------
_.layout.operator("convexer.reload")
_.layout.operator("convexer.dev_test")
_.layout.operator("convexer.preview")
+ _.layout.operator("convexer.hash_reset")
settings = context.scene.cxr_data
# Create ID properties
entdef = None
- classname = active_object.cxr_data.classname
+ classname = cxr_custom_class(active_object)
if classname in cxr_entities:
entdef = cxr_entities[classname]
if active_object == None: return
- default_context = cxr_object_context( \
- bpy.context.scene.cxr_data.scale_factor, 0.0 )
+ default_context = {
+ "scale": bpy.context.scene.cxr_data.scale_factor,
+ "offset": (0,0,0)
+ }
ecn = cxr_intrinsic_classname( active_object )
classname = cxr_custom_class( active_object )
_.layout.enabled=False
classname = ecn
- kvs = cxr_entity_keyvalues( active_object, default_context, classname )
+ kvs = cxr_entity_keyvalues( {
+ "object": active_object,
+ "transform": default_context,
+ "classname": classname
+ })
+
if kvs != None:
for kv in kvs:
if kv[1]:
CXR_MODEL_SETTINGS, CXR_ENTITY_SETTINGS, CXR_CUBEMAP_SETTINGS,\
CXR_LIGHT_SETTINGS, CXR_SCENE_SETTINGS, CXR_DETECT_COMPILERS,\
CXR_ENTITY_PANEL, CXR_LIGHT_PANEL, CXR_PREVIEW_OPERATOR,\
- CXR_VIEW3D, CXR_COMPILER_CHAIN ]
+ CXR_VIEW3D, CXR_COMPILER_CHAIN, CXR_RESET_HASHES ]
vmt_param_dynamic_class = None