_pack_ = 1
_fields_ = [("co",c_float*3),
("norm",c_float*3),
- ("colour",c_float*4),
- ("uv",c_float*2)]
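+            # colour packed to 8-bit; up to four bone weights/groups per vertex for skinning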
+ ("uv",c_float*2),
+ ("colour",c_uint8*4),
+ ("weights",c_uint16*4),
+ ("groups",c_uint8*4)]
class mdl_submesh(Structure):
_pack_ = 1
("submesh_count",c_uint32),
("classtype",c_uint32),
("offset",c_uint32),
+ ("children",c_uint32),
("pstr_name",c_uint32)]
class mdl_header(Structure):
("node_offset",c_uint32),
("strings_offset",c_uint32),
- ("entdata_offset",c_uint32)
+ ("entdata_offset",c_uint32),
+
+ ("anim_count",c_uint32),
+ ("anim_offset",c_uint32)
]
# Entity types
("id_start",c_uint32),
("colour",c_float*3)]
+class classtype_skin(Structure):
+ _pack_ = 1
+ _fields_ = [("skeleton",c_uint32)]
+
+class classtype_skeleton(Structure):
+ _pack_ = 1
+ _fields_ = [("anim_start",c_uint32),
+ ("anim_count",c_uint32)]
+
+class classtype_bone(Structure):
+ _pack_ = 1
+ _fields_ = [("deform",c_uint32)]
+
# Exporter
# ==============================================================================
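+# write_model() packs one collection into a .mdl file: a header followed by the
+# node graph, submeshes, materials, entity data, vertex/indice buffers and strings.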
-def write_model(name):
- print( F"Create mode {name}" )
+def write_model(collection_name):
+   print( F"Model graph | Create model '{collection_name}'" )
header = mdl_header()
header.identifier = 0xABCD0000
return material_cache[mat.name]
# Create root or empty node and materials
+ # this is to designate id 0 as 'NULL'
#
none_material = c_uint32(69)
none_material.name = ""
# Do exporting
#
print( " assigning ids" )
- collection = bpy.data.collections[name]
+ collection = bpy.data.collections[collection_name]
+
+ # Scene graph
+ # ==========================================
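+   # Walk the collection and build a tree of objects (and armature bones),
+   # assigning each entry a sequential uid as it is visited.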
- header.node_count = 1
- for obj in collection.all_objects:
- obj.cv_data.uid = header.node_count
+ header.node_count = 0
+ def _uid():
+ nonlocal header
+ uid = header.node_count
header.node_count += 1
+ return uid
- print( " compiling data" )
- for obj in collection.all_objects:
- print( F" [{obj.cv_data.uid}/{header.node_count-1}] {obj.name}" )
+ print( " creating scene graph" )
+ graph = {"obj": None, "depth": 0, "children": [], "uid": _uid()}
+ graph_lookup = {} # object can lookup its graph def here
- node = mdl_node()
- node.co[0] = obj.location[0]
- node.co[1] = obj.location[2]
- node.co[2] = -obj.location[1]
-
- # Convert rotation quat to our space type
- quat = obj.matrix_world.to_quaternion()
- node.q[0] = quat[1]
- node.q[1] = quat[3]
- node.q[2] = -quat[2]
- node.q[3] = quat[0]
-
- node.s[0] = obj.scale[0]
- node.s[1] = obj.scale[2]
- node.s[2] = obj.scale[1]
- node.pstr_name = emplace_string( obj.name )
-
- # Process entity data
- #
- node.offset = entdata_length
- classtype = obj.cv_data.classtype
-
- if classtype == 'k_classtype_gate':
- node.classtype = 1
- entdata_length += sizeof( classtype_gate )
-
- gate = classtype_gate()
- gate.target = 0
- if obj.cv_data.target != None:
- gate.target = obj.cv_data.target.cv_data.uid
+ for obj in collection.all_objects:
+ if not obj.parent:
- if obj.type == 'MESH':
- gate.dims[0] = obj.data.cv_data.v0[0]
- gate.dims[1] = obj.data.cv_data.v0[1]
- gate.dims[2] = obj.data.cv_data.v0[2]
- else:
- gate.dims[0] = obj.cv_data.v0[0]
- gate.dims[1] = obj.cv_data.v0[1]
- gate.dims[2] = obj.cv_data.v0[2]
+ def _extend( p, n, d ):
+ uid = _uid()
+ tree = {"obj":n, "depth": d, "children":[], "uid": uid}
+ n.cv_data.uid = uid
- entdata_buffer += [gate]
+ if n.type == 'ARMATURE':
+ tree["bones"] = [None] # None is the root transform
- elif classtype == 'k_classtype_block':
- node.classtype = 2
- entdata_length += sizeof( classtype_block )
+ def _extendb( p, n, d ):
+ nonlocal tree
- source = obj.data.cv_data
+ btree = {"bone":n, "depth": d, "children":[], "uid": _uid()}
+ for c in n.children:
+ _extendb( btree, c, d+1 )
- block = classtype_block()
- block.bbx[0][0] = source.v0[0]
- block.bbx[0][1] = source.v0[2]
- block.bbx[0][2] = -source.v1[1]
+ btree['deform'] = n.use_deform
+ p['children'] += [btree]
- block.bbx[1][0] = source.v1[0]
- block.bbx[1][1] = source.v1[2]
- block.bbx[1][2] = -source.v0[1]
- entdata_buffer += [block]
+ if n.use_deform:
+ tree["bones"] += [n.name]
- elif classtype == 'k_classtype_spawn':
- node.classtype = 3
+ for b in n.data.bones:
+ if not b.parent:
+ _extendb( tree, b, d+1 )
- elif classtype == 'k_classtype_water':
- node.classtype = 4
- elif classtype == 'k_classtype_car_path':
- node.classtype = 5
- entdata_length += sizeof( classtype_car_path )
+ for obj1 in n.children:
+ _extend( tree, obj1, d+1 )
- pn = classtype_car_path()
- pn.target = 0
- pn.target1 = 0
+ p["children"] += [tree]
+ graph_lookup[n] = tree
- if obj.cv_data.target != None:
- pn.target = obj.cv_data.target.cv_data.uid
- if obj.cv_data.target1 != None:
- pn.target1 = obj.cv_data.target1.cv_data.uid
+ _extend( graph, obj, 1 )
- entdata_buffer += [pn]
- elif obj.is_instancer:
- target = obj.instance_collection
- node.classtype = 6
- entdata_length += sizeof( classtype_instance )
+ def _graph_iter(p):
+ for c in p['children']:
+ yield c
+ yield from _graph_iter(c)
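+
+   # _graph_iter yields every node depth-first, skipping the placeholder root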
- inst = classtype_instance()
- inst.pstr_file = emplace_string( F"models/{target.name}.mdl" )
- entdata_buffer += [inst]
- elif classtype == 'k_classtype_capsule':
- node.classtype = 7
- elif classtype == 'k_classtype_route_node':
- node.classtype = 8
- entdata_length += sizeof( classtype_route_node )
+ it = _graph_iter(graph)
- rn = classtype_route_node()
- if obj.cv_data.target != None:
- rn.target = obj.cv_data.target.cv_data.uid
- if obj.cv_data.target1 != None:
- rn.target1 = obj.cv_data.target1.cv_data.uid
+ root.children = len(graph['children'])
- entdata_buffer += [rn]
- elif classtype == 'k_classtype_route':
- node.classtype = 9
- entdata_length += sizeof( classtype_route )
- r = classtype_route()
- r.pstr_name = emplace_string("not-implemented")
- r.colour[0] = obj.cv_data.colour[0]
- r.colour[1] = obj.cv_data.colour[1]
- r.colour[2] = obj.cv_data.colour[2]
+ # Compile
+ # ==============================================
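+   # Emit one mdl_node per graph entry: transform, name, optional mesh data and
+   # entity data, in the same depth-first order the uids were assigned.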
+ it = _graph_iter(graph)
+ print( " compiling data" )
+ for node_def in it:
+ if 'obj' in node_def:
+ obj = node_def['obj']
+ objt = obj.type
+ objco = obj.location
+ elif 'bone' in node_def:
+ obj = node_def['bone']
+ objt = 'BONE'
+ objco = obj.head_local
+
+ depth = node_def['depth']
+ uid = node_def['uid']
- if obj.cv_data.target != None:
- r.id_start = obj.cv_data.target.cv_data.uid
+ node = mdl_node()
+ node.co[0] = objco[0]
+ node.co[1] = objco[2]
+ node.co[2] = -objco[1]
+
+ # Convert rotation quat to our space type
+ quat = obj.matrix_local.to_quaternion()
+ node.q[0] = quat[1]
+ node.q[1] = quat[3]
+ node.q[2] = -quat[2]
+ node.q[3] = quat[0]
+
+ if objt == 'BONE':
+ node.s[0] = obj.tail[0]
+ node.s[1] = obj.tail[2]
+ node.s[2] = -obj.tail[1]
+ else:
+ node.s[0] = obj.scale[0]
+ node.s[1] = obj.scale[2]
+ node.s[2] = obj.scale[1]
- entdata_buffer += [r]
+ node.pstr_name = emplace_string( obj.name )
- # classtype == 'k_classtype_none':
+ if objt == 'BONE':
+ classtype = 'k_classtype_bone'
+ elif objt == 'ARMATURE':
+ classtype = 'k_classtype_skeleton'
else:
- node.classtype = 0
- node.offset = 0
+ classtype = obj.cv_data.classtype
+
+ # Process type: MESH
+ # =================================================================
+ #
- # Process meshes
+      # Don't use the cache if we have modifiers that affect the normals
#
- node.submesh_start = header.submesh_count
- node.submesh_count = 0
+ compile_mesh = False
+ if objt == 'MESH':
+ armature_def = None
+ compile_mesh = True
+ can_use_cache = True
- if obj.type == 'MESH':
- default_mat = c_uint32(69)
- default_mat.name = ""
-
- # Dont use the cache if we have modifiers that affect the normals
- #
- use_cache = True
for mod in obj.modifiers:
- if mod.type == 'DATA_TRANSFER':
- use_cache = False
+ if mod.type == 'DATA_TRANSFER' or mod.type == 'SHRINKWRAP':
+ can_use_cache = False
+
+ if mod.type == 'ARMATURE':
+ classtype = 'k_classtype_skin'
+ armature_def = graph_lookup[mod.object]
- if use_cache and obj.data.name in mesh_cache:
+ if can_use_cache and obj.data.name in mesh_cache:
ref = mesh_cache[obj.data.name]
node.submesh_start = ref.submesh_start
node.submesh_count = ref.submesh_count
- node_buffer += [node]
- continue
+ compile_mesh = False
+
+ if compile_mesh:
+ node.submesh_start = header.submesh_count
+ node.submesh_count = 0
+
+ default_mat = c_uint32(69)
+ default_mat.name = ""
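+
+         # export the evaluated mesh so modifier results match the viewport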
dgraph = bpy.context.evaluated_depsgraph_get()
data = obj.evaluated_get(dgraph).data
for j in range(3):
vert = data.vertices[tri.vertices[j]]
li = tri.loops[j]
+ vi = data.loops[li].vertex_index
co = vert.co
norm = data.loops[li].normal
uv = (0,0)
- colour = (1,1,1,1)
+ colour = (255,255,255,255)
+ groups = [0,0,0,0]
+ weights = [0,0,0,0]
+
if data.uv_layers:
uv = data.uv_layers.active.data[li].uv
+
if data.vertex_colors:
colour = data.vertex_colors.active.data[li].color
+ colour = (int(colour[0]*255.0),\
+ int(colour[1]*255.0),\
+ int(colour[2]*255.0),\
+ int(colour[3]*255.0))
+
+                # Weight groups
+ #
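+                # keep the three strongest influences and renormalize them to
+                # 16-bit fixed point so they sum to 65535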
+ if armature_def:
+ weight_groups = sorted( data.vertices[vi].groups, key = \
+ lambda a: a.weight, reverse=True )
+ tot = 0.0
+ for ml in range(3):
+ if len(weight_groups) > ml:
+ g = weight_groups[ml]
+ name = obj.vertex_groups[g.group].name
+ weight = g.weight
+
+ weights[ml] = weight
+ groups[ml] = armature_def['bones'].index(name)
+ tot += weight
+
+ if len(weight_groups) > 0:
+ inv_norm = (1.0/tot) * 65535.0
+ for ml in range(3):
+ weights[ml] = int( weights[ml] * inv_norm )
+ weights[ml] = min( weights[ml], 65535 )
+ weights[ml] = max( weights[ml], 0 )
TOLERENCE = 4
m = float(10**TOLERENCE)
int(norm[2]*m+0.5),\
int(uv[0]*m+0.5),\
int(uv[1]*m+0.5),\
- int(colour[0]*m+0.5),\
- int(colour[1]*m+0.5),\
- int(colour[2]*m+0.5),\
- int(colour[3]*m+0.5))
+ colour[0],\
+ colour[1],\
+ colour[2],\
+ colour[3],\
+ weights[0],\
+ weights[1],\
+ weights[2],\
+ weights[3],\
+ groups[0],\
+ groups[1],\
+ groups[2],\
+ groups[3])
if key in boffa:
indice_buffer += [boffa[key]]
v.colour[1] = colour[1]
v.colour[2] = colour[2]
v.colour[3] = colour[3]
+ v.weights[0] = weights[0]
+ v.weights[1] = weights[1]
+ v.weights[2] = weights[2]
+ v.weights[3] = weights[3]
+ v.groups[0] = groups[0]
+ v.groups[1] = groups[1]
+ v.groups[2] = groups[2]
+ v.groups[3] = groups[3]
+
vertex_buffer += [v]
for i in range(3):
header.indice_count += sm.indice_count
mesh_cache[obj.data.name] = node
+
+ # Process entity data
+ # ==================================================================
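+      # each node stores a byte offset into the shared entdata blob; its
+      # classtype id tells the runtime which struct is stored there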
+ node.offset = entdata_length
+
+ if classtype != 'k_classtype_none':
+ disptype = classtype
+ else:
+ disptype = objt
+
+ s000 = F" [{uid: 3}/{header.node_count-1}]" + " |"*(depth-1)
+ s001 = F" L {obj.name}"
+ s002 = s000+s001
+ s003 = F"{disptype}"
+ s004 = ""
+ if classtype == 'k_classtype_skin':
+ s004 = F"-> {armature_def['obj'].cv_data.uid}"
+
+ scmp = F"{s002:<32} {s003:<16} {s004}"
+ print( scmp )
+
+ if classtype == 'k_classtype_INSTANCE' or \
+ classtype == 'k_classtype_BONE' or \
+ classtype == 'k_classtype_SKELETON' or \
+ classtype == 'k_classtype_SKIN':
+         print( "ERROR: user classtype cannot be an internal _INSTANCE/_BONE/_SKELETON/_SKIN type" )
+ node.classtype = 0
+ node.offset = 0
+
+ elif classtype == 'k_classtype_skin':
+ node.classtype = 12
+
+ armature = armature_def['obj']
+ entdata_length += sizeof( classtype_skin )
+
+ skin = classtype_skin()
+ skin.skeleton = armature.cv_data.uid
+ entdata_buffer += [skin]
+
+ elif classtype == 'k_classtype_skeleton':
+ node.classtype = 11
+ entdata_length += sizeof( classtype_skeleton )
+
+ skeleton = classtype_skeleton()
+ skeleton.anim_start = 0
+ skeleton.anim_count = 0
+
+ entdata_buffer += [skeleton]
+
+ elif classtype == 'k_classtype_bone':
+ node.classtype = 10
+ entdata_length += sizeof( classtype_bone )
+
+ bone = classtype_bone()
+         bone.deform = node_def['deform']
+ entdata_buffer += [bone]
+
+ elif classtype == 'k_classtype_gate':
+ node.classtype = 1
+ entdata_length += sizeof( classtype_gate )
+
+ gate = classtype_gate()
+ gate.target = 0
+ if obj.cv_data.target != None:
+ gate.target = obj.cv_data.target.cv_data.uid
+
+ if obj.type == 'MESH':
+ gate.dims[0] = obj.data.cv_data.v0[0]
+ gate.dims[1] = obj.data.cv_data.v0[1]
+ gate.dims[2] = obj.data.cv_data.v0[2]
+ else:
+ gate.dims[0] = obj.cv_data.v0[0]
+ gate.dims[1] = obj.cv_data.v0[1]
+ gate.dims[2] = obj.cv_data.v0[2]
+
+ entdata_buffer += [gate]
+
+ elif classtype == 'k_classtype_block':
+ node.classtype = 2
+ entdata_length += sizeof( classtype_block )
+
+ source = obj.data.cv_data
+
+ block = classtype_block()
+ block.bbx[0][0] = source.v0[0]
+ block.bbx[0][1] = source.v0[2]
+ block.bbx[0][2] = -source.v1[1]
+
+ block.bbx[1][0] = source.v1[0]
+ block.bbx[1][1] = source.v1[2]
+ block.bbx[1][2] = -source.v0[1]
+ entdata_buffer += [block]
+
+ elif classtype == 'k_classtype_spawn':
+ node.classtype = 3
+
+ elif classtype == 'k_classtype_water':
+ node.classtype = 4
+
+ elif classtype == 'k_classtype_car_path':
+ node.classtype = 5
+ entdata_length += sizeof( classtype_car_path )
+
+ pn = classtype_car_path()
+ pn.target = 0
+ pn.target1 = 0
+
+ if obj.cv_data.target != None:
+ pn.target = obj.cv_data.target.cv_data.uid
+ if obj.cv_data.target1 != None:
+ pn.target1 = obj.cv_data.target1.cv_data.uid
+
+ entdata_buffer += [pn]
+
+ elif obj.is_instancer:
+ target = obj.instance_collection
+
+ node.classtype = 6
+ entdata_length += sizeof( classtype_instance )
+
+ inst = classtype_instance()
+ inst.pstr_file = emplace_string( F"models/{target.name}.mdl" )
+ entdata_buffer += [inst]
+
+ elif classtype == 'k_classtype_capsule':
+ node.classtype = 7
+
+ elif classtype == 'k_classtype_route_node':
+ node.classtype = 8
+ entdata_length += sizeof( classtype_route_node )
+
+ rn = classtype_route_node()
+ if obj.cv_data.target != None:
+ rn.target = obj.cv_data.target.cv_data.uid
+ if obj.cv_data.target1 != None:
+ rn.target1 = obj.cv_data.target1.cv_data.uid
+
+ entdata_buffer += [rn]
+
+ elif classtype == 'k_classtype_route':
+ node.classtype = 9
+ entdata_length += sizeof( classtype_route )
+ r = classtype_route()
+ r.pstr_name = emplace_string("not-implemented")
+ r.colour[0] = obj.cv_data.colour[0]
+ r.colour[1] = obj.cv_data.colour[1]
+ r.colour[2] = obj.cv_data.colour[2]
+
+ if obj.cv_data.target != None:
+ r.id_start = obj.cv_data.target.cv_data.uid
+
+ entdata_buffer += [r]
+
+ # classtype == 'k_classtype_none':
+ else:
+ node.classtype = 0
+ node.offset = 0
+
node_buffer += [node]
# Write data arrays
print( "Writing data" )
fpos = sizeof(header)
+ print( F"Nodes: {header.node_count}" )
header.node_offset = fpos
fpos += sizeof(mdl_node)*header.node_count
+ print( F"Submeshes: {header.submesh_count}" )
header.submesh_offset = fpos
fpos += sizeof(mdl_submesh)*header.submesh_count
+ print( F"Materials: {header.material_count}" )
header.material_offset = fpos
fpos += sizeof(mdl_material)*header.material_count
+ print( F"Entdata length: {entdata_length}" )
header.entdata_offset = fpos
fpos += entdata_length
-
+
+ print( F"Vertex count: {header.vertex_count}" )
header.vertex_offset = fpos
fpos += sizeof(mdl_vert)*header.vertex_count
+ print( F"Indice count: {header.indice_count}" )
header.indice_offset = fpos
fpos += sizeof(c_uint32)*header.indice_count
-
+
+ print( F"Strings length: {len(strings_buffer)}" )
header.strings_offset = fpos
fpos += len(strings_buffer)
header.file_length = fpos
- fp = open(F"/home/harry/Documents/carve/models_src/{name}.mdl", "wb")
+ path = F"/home/harry/Documents/carve/models_src/{collection_name}.mdl"
+ fp = open( path, "wb" )
+
fp.write( bytearray( header ) )
for node in node_buffer:
fp.write( strings_buffer )
fp.close()
- print( F"Completed {name}.mdl" )
+ print( F"Completed {collection_name}.mdl" )
# Clicky clicky GUI
# ------------------------------------------------------------------------------
('k_classtype_spawn', "k_classtype_spawn", "", 3),
('k_classtype_water', "k_classtype_water", "", 4),
('k_classtype_car_path', "k_classtype_car_path", "", 5),
+        ('k_classtype_INSTANCE', "", "", 6 ),
('k_classtype_capsule', "k_classtype_capsule", "", 7 ),
('k_classtype_route_node', "k_classtype_route_node", "", 8 ),
- ('k_classtype_route', "k_classtype_route", "", 9 )
+ ('k_classtype_route', "k_classtype_route", "", 9 ),
+        ('k_classtype_bone', "k_classtype_bone", "", 10 ),
+        ('k_classtype_SKELETON', "", "", 11 ),
+        ('k_classtype_SKIN', "", "", 12 )
])
+class CV_SCENE_SETTINGS(bpy.types.PropertyGroup):
+ use_hidden: bpy.props.BoolProperty( name="use hidden", default=False )
+
class CV_OBJ_PANEL(bpy.types.Panel):
bl_label="Entity Config"
bl_idname="SCENE_PT_cv_entity"
def draw(_, context):
layout = _.layout
+ layout.prop( context.scene.cv_data, "use_hidden")
layout.operator( "carve.compile_all" )
def test_compile():
- for col in bpy.data.collections["export"].children:
- write_model( col.name )
+ view_layer = bpy.context.view_layer
+ for col in view_layer.layer_collection.children["export"].children:
+ if not col.hide_viewport or bpy.context.scene.cv_data.use_hidden:
+ write_model( col.name )
class CV_COMPILE(bpy.types.Operator):
bl_idname="carve.compile_all"
return {'FINISHED'}
classes = [CV_OBJ_SETTINGS,CV_OBJ_PANEL,CV_COMPILE,CV_INTERFACE,\
- CV_MESH_SETTINGS]
+ CV_MESH_SETTINGS, CV_SCENE_SETTINGS]
def register():
global cv_view_draw_handler
bpy.types.Object.cv_data = bpy.props.PointerProperty(type=CV_OBJ_SETTINGS)
bpy.types.Mesh.cv_data = bpy.props.PointerProperty(type=CV_MESH_SETTINGS)
+ bpy.types.Scene.cv_data = bpy.props.PointerProperty(type=CV_SCENE_SETTINGS)
cv_view_draw_handler = bpy.types.SpaceView3D.draw_handler_add(\
cv_draw,(),'WINDOW','POST_VIEW')