[DEV] work on EMF blender export

commit 1719e9c1e8 (parent 8d320ad441)
@@ -4,7 +4,7 @@
 bl_info = {
     "name": "EGE Mesh file format emf",
     "author": "Edouard DUPIN",
-    "blender": (2, 80, 0),
+    "blender": (2, 81, 6),
     "location": "File > Import-Export",
     "description": "Import-Export emf, Import EMF mesh, UV's, materials and textures",
     "category": "Import-Export"}
@@ -161,4 +161,9 @@ def unregister():
 
 
 if __name__ == "__main__":
-    register()
+    print("Registering.");
+    register();
+    #print("Executing.");
+    #bpy.ops.export_scene.emf();
+
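With the add-on registered, the exporter is reachable as the scene export operator `export_scene.emf` (the commented call above and the test script below both use that id). A minimal sketch of invoking it from Blender's Python console follows; the output path is a made-up example:

    import bpy
    # hypothetical destination path; export_scene.emf is the operator id used elsewhere in this commit
    bpy.ops.export_scene.emf(filepath="/tmp/tree1.emf")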
blender/io_scene_emf/exportEmf.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+import bpy
+
+bpy.ops.wm.open_mainfile(filepath="/home/heero/dev/workspace-game/atriasoft/ege/blender/io_scene_emf/../../samples/resources/emf/tree1.blend")
+
+bpy.ops.export_scene.emf(filepath="/home/heero/dev/workspace-game/atriasoft/ege/blender/io_scene_emf/../../samples/resources/emf/tree1.emf")
+
+#bpy.ops.wm.open_mainfile(filepath="/home/heero/dev/workspace-game/atriasoft/ewol/resources/resources/ewol/theme/shape/Entry.blend")
+
+#bpy.ops.export_scene.emf(filepath="/home/heero/dev/workspace-game/atriasoft/ewol/resources/resources/ewol/theme/shape/Entry.emf")
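exportEmf.py is a small headless test driver: it opens a sample .blend and immediately runs the EMF export operator on it. A hedged, parameterized variant is sketched below, reading the two paths from the command line instead of hard-coding them (Blender leaves everything after a `--` separator to the script); the argument handling is an assumption on top of the committed script:

    import sys
    import bpy

    # Assumed convention: blender --background -P exportEmf.py -- <input.blend> <output.emf>
    argv = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else []
    blend_path, emf_path = argv[0], argv[1]

    bpy.ops.wm.open_mainfile(filepath=blend_path)
    bpy.ops.export_scene.emf(filepath=emf_path)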
@@ -4,7 +4,7 @@ import time
 
 import bpy
 import mathutils
-import bpy_extras.io_utils
+from bpy_extras import io_utils, node_shader_utils
 
 EXPORT_COLLISION_NAME = ""
 
@@ -117,13 +117,14 @@ def get_physics_shape(obj, mainObjScale):
 
 
 def write_collision_shape(object, file, mainObjScale, offset):
+    fw = file.write
     if len(getChildren(object))==0:
         # no phisical shape ...
         return
     string_offset = ""
     for iii in range(offset):
         string_offset += "\t"
-    file.write(string_offset + 'Physics:\n')
+    fw(string_offset + 'Physics:\n')
     for subObj in getChildren(object):
         print(" element='" + subObj.name + "' type '" + str(subObj.type) + "'")
         if subObj.type != 'MESH' \
@@ -133,9 +134,9 @@ def write_collision_shape(object, file, mainObjScale, offset):
         if shape=="":
             print("error of shape detection type ...");
             continue
-        file.write(string_offset + "\t" + shape + "\n" )
+        fw(string_offset + "\t" + shape + "\n" )
         for (k,v) in props.items():
-            file.write(string_offset + "\t\t%s:%s\n" % (k, v) )
+            fw(string_offset + "\t\t%s:%s\n" % (k, v) )
 
 
 
@@ -157,6 +158,7 @@ def mesh_triangulate(me):
 
 def write_mtl(scene, file, filepath, path_mode, copy_set, mtl_dict):
     from mathutils import Color
+    fw = file.write
     world = scene.world
     #if world and world.ambient_color:
     # world_amb = world.ambient_color
@@ -164,8 +166,8 @@ def write_mtl(scene, file, filepath, path_mode, copy_set, mtl_dict):
     world_amb = Color((0.0, 0.0, 0.0))
     source_dir = os.path.dirname(bpy.data.filepath)
     dest_dir = os.path.dirname(filepath)
-    file.write('\n')
-    #file.write('\nMaterials:%i\n' % len(mtl_dict))
+    fw('\n')
+    #fw('\nMaterials:%i\n' % len(mtl_dict))
     mtl_dict_values = list(mtl_dict.values())
     mtl_dict_values.sort(key=lambda m: m[0])
     # Write material/image combinations we have used.
@@ -173,97 +175,100 @@ def write_mtl(scene, file, filepath, path_mode, copy_set, mtl_dict):
     for mtl_mat_name, mat, face_img in mtl_dict_values:
         # Get the Blender data for the material and the image.
         # Having an image named None will make a bug, dont do it:)
-        file.write('Materials:%s\n' % mtl_mat_name) # Define a new material: matname_imgname
-        if mat:
-            # convert from blenders spec to 0 - 1000 range.
-            if mat.specular_shader == 'WARDISO':
-                tspec = (0.4 - mat.specular_slope) / 0.0004
+        #print("material: '" + str(mtl_mat_name) + "': " + str(mat) + " " + str(face_img));
+        #print(" mat: ");
+        #for elem in dir(mat):
+        # print(" - " + elem);
+        fw('Materials:%s\n' % mtl_mat_name) # Define a new material: matname_imgname
+        mat_wrap = node_shader_utils.PrincipledBSDFWrapper(mat) if mat else None
+
+        if mat_wrap:
+            use_mirror = mat_wrap.metallic != 0.0
+            use_transparency = mat_wrap.alpha != 1.0
+
+            # XXX Totally empirical conversion, trying to adapt it
+            # (from 1.0 - 0.0 Principled BSDF range to 0.0 - 900.0 OBJ specular exponent range)...
+            spec = (1.0 - mat_wrap.roughness) * 30
+            spec *= spec
+            fw(' Ns %.6f\n' % spec)
+
+            # Ambient
+            if use_mirror:
+                fw(' Ka %.6f %.6f %.6f\n' % (mat_wrap.metallic, mat_wrap.metallic, mat_wrap.metallic))
             else:
-                tspec = (mat.specular_hardness - 1) * 1.9607843137254901
-            file.write('\tNs %.6f\n' % tspec)
-            del tspec
-            file.write('\tKa %.6f %.6f %.6f\n' % (mat.ambient * world_amb)[:]) # Ambient, uses mirror color,
-            file.write('\tKd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:]) # Diffuse
-            file.write('\tKs %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:]) # Specular
-            if hasattr(mat, "ior"):
-                file.write('\tNi %.6f\n' % mat.ior) # Refraction index
+                fw(' Ka %.6f %.6f %.6f\n' % (1.0, 1.0, 1.0))
+            fw(' Kd %.6f %.6f %.6f\n' % mat_wrap.base_color[:3]) # Diffuse
+            # XXX TODO Find a way to handle tint and diffuse color, in a consistent way with import...
+            fw(' Ks %.6f %.6f %.6f\n' % (mat_wrap.specular, mat_wrap.specular, mat_wrap.specular)) # Specular
+            # Emission, not in original MTL standard but seems pretty common, see T45766.
+            emission_strength = mat_wrap.emission_strength
+            emission = [emission_strength * c for c in mat_wrap.emission_color[:3]]
+            fw(' Ke %.6f %.6f %.6f\n' % tuple(emission))
+            fw(' vNi %.6f\n' % mat_wrap.ior) # Refraction index
+            fw(' d %.6f\n' % mat_wrap.alpha) # Alpha (obj uses 'd' for dissolve)
+
+            # See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
+            # Note that mapping is rather fuzzy sometimes, trying to do our best here.
+            if mat_wrap.specular == 0:
+                fw(' illum 1\n') # no specular.
+            elif use_mirror:
+                if use_transparency:
+                    fw(' illum 6\n') # Reflection, Transparency, Ray trace
+                else:
+                    fw(' illum 3\n') # Reflection and Ray trace
+            elif use_transparency:
+                fw(' illum 9\n') # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
             else:
-                file.write('\tNi %.6f\n' % 1.0)
-            file.write('\td %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
-            # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
-            if mat.use_shadeless:
-                file.write('\tillum 0\n') # ignore lighting
-            elif mat.specular_intensity == 0:
-                file.write('\tillum 1\n') # no specular.
-            else:
-                file.write('\tillum 2\n') # light normaly
+                fw(' illum 2\n') # light normally
+
+            #### And now, the image textures...
+            image_map = {
+                "map_Kd": "base_color_texture",
+                "map_Ka": None, # ambient...
+                "map_Ks": "specular_texture",
+                "map_Ns": "roughness_texture",
+                "map_d": "alpha_texture",
+                "map_Tr": None, # transmission roughness?
+                "map_Bump": "normalmap_texture",
+                "disp": None, # displacement...
+                "refl": "metallic_texture",
+                "map_Ke": "emission_color_texture" if emission_strength != 0.0 else None,
+            }
+
+            for key, mat_wrap_key in sorted(image_map.items()):
+                if mat_wrap_key is None:
+                    continue
+                tex_wrap = getattr(mat_wrap, mat_wrap_key, None)
+                if tex_wrap is None:
+                    continue
+                image = tex_wrap.image
+                if image is None:
+                    continue
+
+                filepath = io_utils.path_reference(image.filepath, source_dir, dest_dir,
+                                                   path_mode, "", copy_set, image.library)
+                options = []
+                if key == "map_Bump":
+                    if mat_wrap.normalmap_strength != 1.0:
+                        options.append('-bm %.6f' % mat_wrap.normalmap_strength)
+                if tex_wrap.translation != Vector((0.0, 0.0, 0.0)):
+                    options.append('-o %.6f %.6f %.6f' % tex_wrap.translation[:])
+                if tex_wrap.scale != Vector((1.0, 1.0, 1.0)):
+                    options.append('-s %.6f %.6f %.6f' % tex_wrap.scale[:])
+                if options:
+                    fw('%s %s %s\n' % (key, " ".join(options), repr(filepath)[1:-1]))
+                else:
+                    fw('%s %s\n' % (key, repr(filepath)[1:-1]))
+
         else:
-            #write a dummy material here?
-            file.write('\tNs 0\n')
-            file.write('\tKa %.6f %.6f %.6f\n' % world_amb[:]) # Ambient, uses mirror color,
-            file.write('\tKd 0.8 0.8 0.8\n')
-            file.write('\tKs 0.8 0.8 0.8\n')
-            file.write('\td 1\n') # No alpha
-            file.write('\tillum 2\n') # light normaly
-        # Write images!
-        if face_img: # We have an image on the face!
-            filepath = face_img.filepath
-            if filepath: # may be '' for generated images
-                # write relative image path
-                filepath = bpy_extras.io_utils.path_reference(filepath,
-                                                              source_dir,
-                                                              dest_dir,
-                                                              path_mode,
-                                                              "",
-                                                              copy_set,
-                                                              face_img.library)
-                file.write('\tmap_Kd %s\n' % filepath) # Diffuse mapping image
-                del filepath
-            else:
-                # so we write the materials image.
-                face_img = None
-        if mat: # No face image. if we havea material search for MTex image.
-            image_map = {}
-            # backwards so topmost are highest priority
-            for mtex in reversed(mat.texture_slots):
-                if mtex and mtex.texture and mtex.texture.type == 'IMAGE':
-                    image = mtex.texture.image
-                    if image:
-                        # texface overrides others
-                        if( mtex.use_map_color_diffuse
-                            and (face_img is None)
-                            and (mtex.use_map_warp is False)
-                            and (mtex.texture_coords != 'REFLECTION')
-                            ):
-                            image_map["map_Kd"] = image
-                        if mtex.use_map_ambient:
-                            image_map["map_Ka"] = image
-                        # this is the Spec intensity channel but Ks stands for specular Color
-                        if mtex.use_map_color_spec: # specular color
-                            image_map["map_Ks"] = image
-                        if mtex.use_map_hardness: # specular hardness/glossiness
-                            image_map["map_Ns"] = image
-                        if mtex.use_map_alpha:
-                            image_map["map_d"] = image
-                        if mtex.use_map_translucency:
-                            image_map["map_Tr"] = image
-                        if mtex.use_map_normal and (mtex.texture.use_normal_map is True):
-                            image_map["map_Bump"] = image
-                        if mtex.use_map_normal and (mtex.texture.use_normal_map is False):
-                            image_map["map_Disp"] = image
-                        if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
-                            image_map["map_refl"] = image
-                        if mtex.use_map_emit:
-                            image_map["map_Ke"] = image
-            for key, image in image_map.items():
-                filepath = bpy_extras.io_utils.path_reference(image.filepath,
-                                                              source_dir,
-                                                              dest_dir,
-                                                              path_mode,
-                                                              "",
-                                                              copy_set,
-                                                              image.library)
-                file.write('\t%s %s\n' % (key, repr(filepath)[1:-1]))
+            # Write a dummy material here?
+            fw(' Ns 500\n')
+            fw(' Ka 0.8 0.8 0.8\n')
+            fw(' Kd 0.8 0.8 0.8\n')
+            fw(' Ks 0.8 0.8 0.8\n')
+            fw(' d 1\n') # No alpha
+            fw(' illum 2\n') # light normally
 
 
 def veckey3d(v):
     return round(v.x, 6), round(v.y, 6), round(v.z, 6)
@@ -273,7 +278,7 @@ def veckey2d(v):
 
 def write_mesh(scene, file, object, mtl_dict):
     print("**************** '" + str(object.name) + "' *******************")
+    fw = file.write
     # Initialize totals, these are updated each object
     totverts = 1
     totuvco = 1
@@ -288,7 +293,7 @@ def write_mesh(scene, file, object, mtl_dict):
 
     if object.type != 'MESH':
         print(object.name + 'is not a mesh type - ignoring type=' + object.type)
-        file.write('# can not export:"%s":type="%s"\n' % (object.name, str(object.type)))
+        fw('# can not export:"%s":type="%s"\n' % (object.name, str(object.type)))
         return
     #print("name:'%s'" % object.name)
     #for plop in object.child:
@@ -319,16 +324,16 @@ def write_mesh(scene, file, object, mtl_dict):
         if me is None:
             continue
         me.transform(ob_mat)
-        #print("ploppp:" + str(ob_mat) )
+        print("ploppp:" + str(ob_mat) )
         # _must_ do this first since it re-allocs arrays
         # triangulate all the mesh:
         mesh_triangulate(me)
         # calculated normals:
         me.calc_normals()
         # export UV mapping:
-        faceuv = len(me.uv_textures) > 0
+        faceuv = len(me.uv_layers) > 0
         if faceuv:
-            uv_texture = me.uv_textures.active.data[:]
+            uv_texture = me.uv_layers.active.data[:]
             uv_layer = me.uv_layers.active.data[:]
         me_verts = me.vertices[:]
         # Make our own list so it can be sorted to reduce context switching
@@ -365,19 +370,19 @@ def write_mesh(scene, file, object, mtl_dict):
             obnamestring = name_compat(name1)
         else:
             obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))
-        file.write('Mesh:%s\n' % obnamestring) # Write Object name
+        fw('Mesh:%s\n' % obnamestring) # Write Object name
         ###########################################################
         ## Vert
         ###########################################################
-        file.write('\tVertex:%d\n\t\t' % len(me_verts))
+        fw('\tVertex:%d\n\t\t' % len(me_verts))
         for v in me_verts:
-            file.write('%.6f %.6f %.6f|' % v.co[:])
-        file.write('\n')
+            fw('%.6f %.6f %.6f|' % v.co[:])
+        fw('\n')
         ###########################################################
         ## UV
         ###########################################################
         if faceuv:
-            file.write('\tUV-mapping:\n\t\t')
+            fw('\tUV-mapping:\n\t\t')
             # in case removing some of these dont get defined.
             uv = uvkey = uv_dict = f_index = uv_index = None
             uv_face_mapping = [None] * len(face_index_pairs)
@@ -391,12 +396,12 @@ def write_mesh(scene, file, object, mtl_dict):
                         uv_k = uv_dict[uvkey]
                     except:
                         uv_k = uv_dict[uvkey] = len(uv_dict)
-                        file.write('%.6f %.6f|' % uv[:])
+                        fw('%.6f %.6f|' % uv[:])
                     uv_ls.append(uv_k)
             uv_unique_count = len(uv_dict)
             del uv, uvkey, uv_dict, f_index, uv_index, uv_ls, uv_k
             # Only need uv_unique_count and uv_face_mapping
-            file.write('\n')
+            fw('\n')
         else:
             print("does not use UV-MAPPING")
         ###########################################################
@@ -409,7 +414,7 @@ def write_mesh(scene, file, object, mtl_dict):
             localIsSmooth = 'face'
         else:
             localIsSmooth = 'face'
-        file.write('\tNormal(%s):%d\n\t\t' % (localIsSmooth, len(face_index_pairs)) )
+        fw('\tNormal(%s):%d\n\t\t' % (localIsSmooth, len(face_index_pairs)) )
         for f, f_index in face_index_pairs:
             if f.use_smooth:
                 for v_idx in f.vertices:
@@ -418,22 +423,22 @@ def write_mesh(scene, file, object, mtl_dict):
                     if noKey not in globalNormals:
                         globalNormals[noKey] = totno
                         totno += 1
-                        file.write('%.6f %.6f %.6f|' % noKey)
+                        fw('%.6f %.6f %.6f|' % noKey)
             else:
                 # Hard, 1 normal from the face.
                 noKey = veckey3d(f.normal)
                 if noKey not in globalNormals:
                     globalNormals[noKey] = totno
                     totno += 1
-                    file.write('%.6f %.6f %.6f|' % noKey)
+                    fw('%.6f %.6f %.6f|' % noKey)
 
-        file.write('\n')
+        fw('\n')
         if not faceuv:
             f_image = None
         ###########################################################
         ## faces
         ###########################################################
-        file.write('\tFace:%d' % len(face_index_pairs))
+        fw('\tFace:%d' % len(face_index_pairs))
         for f, f_index in face_index_pairs:
             f_smooth = f.use_smooth
             f_mat = min(f.material_index, len(materials) - 1)
@@ -451,7 +456,7 @@ def write_mesh(scene, file, object, mtl_dict):
             else:
                 if key[0] is None and key[1] is None:
                     # inform the use of a material:
-                    file.write("\n\t\t---:") # mat, image
+                    fw("\n\t\t---:") # mat, image
                 else:
                     mat_data = mtl_dict.get(key)
                     if not mat_data:
@@ -476,14 +481,14 @@ def write_mesh(scene, file, object, mtl_dict):
                         mat_data = mtl_dict[key] = mtl_name, materials[f_mat], f_image
                         mtl_rev_dict[mtl_name] = key
                 # set the use of a material:
-                file.write("\n\t\t%s\n\t\t\t" % mat_data[0]) # can be mat_image or (null)
+                fw("\n\t\t%s\n\t\t\t" % mat_data[0]) # can be mat_image or (null)
             contextMat = key
             f_v = [(vi, me_verts[v_idx]) for vi, v_idx in enumerate(f.vertices)]
             if faceuv:
                 # export the normals:
                 if f_smooth: # Smoothed, use vertex normals
                     for vi, v in f_v:
-                        file.write(" %d/%d/%d" %
+                        fw(" %d/%d/%d" %
                                    (v.index + totverts-1,
                                     totuvco + uv_face_mapping[f_index][vi]-1,
                                     globalNormals[veckey3d(v.normal)]-1,
@@ -491,7 +496,7 @@ def write_mesh(scene, file, object, mtl_dict):
                 else: # No smoothing, face normals
                     no = globalNormals[veckey3d(f.normal)]
                     for vi, v in f_v:
-                        file.write(" %d/%d/%d" %
+                        fw(" %d/%d/%d" %
                                    (v.index + totverts-1,
                                     totuvco + uv_face_mapping[f_index][vi]-1,
                                     no-1,
@@ -501,31 +506,31 @@ def write_mesh(scene, file, object, mtl_dict):
                 # export the normals:
                 if f_smooth: # Smoothed, use vertex normals
                     for vi, v in f_v:
-                        file.write(" %d/%d" % (
+                        fw(" %d/%d" % (
                             v.index + totverts-1,
                             globalNormals[veckey3d(v.normal)]-1,
                             ))
                 else: # No smoothing, face normals
                     no = globalNormals[veckey3d(f.normal)]
                     for vi, v in f_v:
-                        file.write(" %d/%d" % (v.index + totverts-1, no-1))
-            file.write('|')
-        file.write('\n')
+                        fw(" %d/%d" % (v.index + totverts-1, no-1))
+            fw('|')
+        fw('\n')
         # Write edges. ==> did not know what it is ...
-        #file.write('Faces:%d' % len(edges))
+        #fw('Faces:%d' % len(edges))
         #for ed in edges:
         # if ed.is_loose:
-        # file.write('%d %d\n' % (ed.vertices[0] + totverts, ed.vertices[1] + totverts))
+        # fw('%d %d\n' % (ed.vertices[0] + totverts, ed.vertices[1] + totverts))
 
         # Make the indices global rather then per mesh
         totverts += len(me_verts)
         if faceuv:
             totuvco += uv_unique_count
         # clean up
-        bpy.data.meshes.remove(me)
+        # TODO: bpy.data. .remove(me)
 
-        if object.dupli_type != 'NONE':
-            object.dupli_list_clear()
+        # TODO: if object.dupli_type != 'NONE':
+        # TODO: object.dupli_list_clear()
         #####################################################################
         ## Save collision shapes (for one object):
         #####################################################################
@@ -554,10 +559,10 @@ def write_file(filepath,
     mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
 
     file = open(filepath, "w", encoding="utf8", newline="\n")
+    fw = file.write
     # Write Header
-    file.write('EMF(STRING)\n') # if binary:file.write('EMF(BINARY)\n')
-    file.write('# Blender v%s EMF File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
+    fw('EMF(STRING)\n') # if binary:fw('EMF(BINARY)\n')
+    fw('# Blender v%s EMF File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
 
     # A Dict of Materials
     # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
blender/io_scene_emf/readme.md (new file, 4 lines)
@@ -0,0 +1,4 @@
+To export manyally a emf... (for test ...)
+
+blender --background -P ./exportEmf.py
+
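For reference, `--background` starts Blender without the UI and `-P` runs the given Python file after startup, so the hard-coded paths inside exportEmf.py decide which .blend is opened and where the .emf lands. If the parameterized sketch shown earlier were used instead, the paths could be appended after a `--` separator (the example paths here are hypothetical):

    blender --background -P ./exportEmf.py -- ./tree1.blend /tmp/tree1.emf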
blender/io_scene_obj/__init__.py (new file, 514 lines)
@@ -0,0 +1,514 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or
|
||||||
|
# modify it under the terms of the GNU General Public License
|
||||||
|
# as published by the Free Software Foundation; either version 2
|
||||||
|
# of the License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program; if not, write to the Free Software Foundation,
|
||||||
|
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
# <pep8-80 compliant>
|
||||||
|
|
||||||
|
bl_info = {
|
||||||
|
"name": "Wavefront OBJ format",
|
||||||
|
"author": "Campbell Barton, Bastien Montagne",
|
||||||
|
"version": (3, 8, 1),
|
||||||
|
"blender": (2, 81, 6),
|
||||||
|
"location": "File > Import-Export",
|
||||||
|
"description": "Import-Export OBJ, Import OBJ mesh, UV's, materials and textures",
|
||||||
|
"warning": "",
|
||||||
|
"doc_url": "{BLENDER_MANUAL_URL}/addons/import_export/scene_obj.html",
|
||||||
|
"support": 'OFFICIAL',
|
||||||
|
"category": "Import-Export",
|
||||||
|
}
|
||||||
|
|
||||||
|
if "bpy" in locals():
|
||||||
|
import importlib
|
||||||
|
if "import_obj" in locals():
|
||||||
|
importlib.reload(import_obj)
|
||||||
|
if "export_obj" in locals():
|
||||||
|
importlib.reload(export_obj)
|
||||||
|
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from bpy.props import (
|
||||||
|
BoolProperty,
|
||||||
|
FloatProperty,
|
||||||
|
StringProperty,
|
||||||
|
EnumProperty,
|
||||||
|
)
|
||||||
|
from bpy_extras.io_utils import (
|
||||||
|
ImportHelper,
|
||||||
|
ExportHelper,
|
||||||
|
orientation_helper,
|
||||||
|
path_reference_mode,
|
||||||
|
axis_conversion,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@orientation_helper(axis_forward='-Z', axis_up='Y')
|
||||||
|
class ImportOBJ(bpy.types.Operator, ImportHelper):
|
||||||
|
"""Load a Wavefront OBJ File"""
|
||||||
|
bl_idname = "import_scene.obj"
|
||||||
|
bl_label = "Import OBJ"
|
||||||
|
bl_options = {'PRESET', 'UNDO'}
|
||||||
|
|
||||||
|
filename_ext = ".obj"
|
||||||
|
filter_glob: StringProperty(
|
||||||
|
default="*.obj;*.mtl",
|
||||||
|
options={'HIDDEN'},
|
||||||
|
)
|
||||||
|
|
||||||
|
use_edges: BoolProperty(
|
||||||
|
name="Lines",
|
||||||
|
description="Import lines and faces with 2 verts as edge",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
use_smooth_groups: BoolProperty(
|
||||||
|
name="Smooth Groups",
|
||||||
|
description="Surround smooth groups by sharp edges",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
use_split_objects: BoolProperty(
|
||||||
|
name="Object",
|
||||||
|
description="Import OBJ Objects into Blender Objects",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
use_split_groups: BoolProperty(
|
||||||
|
name="Group",
|
||||||
|
description="Import OBJ Groups into Blender Objects",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
use_groups_as_vgroups: BoolProperty(
|
||||||
|
name="Poly Groups",
|
||||||
|
description="Import OBJ groups as vertex groups",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
use_image_search: BoolProperty(
|
||||||
|
name="Image Search",
|
||||||
|
description="Search subdirs for any associated images "
|
||||||
|
"(Warning, may be slow)",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
split_mode: EnumProperty(
|
||||||
|
name="Split",
|
||||||
|
items=(('ON', "Split", "Split geometry, omits unused verts"),
|
||||||
|
('OFF', "Keep Vert Order", "Keep vertex order from file"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
global_clamp_size: FloatProperty(
|
||||||
|
name="Clamp Size",
|
||||||
|
description="Clamp bounds under this value (zero to disable)",
|
||||||
|
min=0.0, max=1000.0,
|
||||||
|
soft_min=0.0, soft_max=1000.0,
|
||||||
|
default=0.0,
|
||||||
|
)
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
# print("Selected: " + context.active_object.name)
|
||||||
|
from . import import_obj
|
||||||
|
|
||||||
|
if self.split_mode == 'OFF':
|
||||||
|
self.use_split_objects = False
|
||||||
|
self.use_split_groups = False
|
||||||
|
else:
|
||||||
|
self.use_groups_as_vgroups = False
|
||||||
|
|
||||||
|
keywords = self.as_keywords(ignore=("axis_forward",
|
||||||
|
"axis_up",
|
||||||
|
"filter_glob",
|
||||||
|
"split_mode",
|
||||||
|
))
|
||||||
|
|
||||||
|
global_matrix = axis_conversion(from_forward=self.axis_forward,
|
||||||
|
from_up=self.axis_up,
|
||||||
|
).to_4x4()
|
||||||
|
keywords["global_matrix"] = global_matrix
|
||||||
|
|
||||||
|
if bpy.data.is_saved and context.preferences.filepaths.use_relative_paths:
|
||||||
|
import os
|
||||||
|
keywords["relpath"] = os.path.dirname(bpy.data.filepath)
|
||||||
|
|
||||||
|
return import_obj.load(context, **keywords)
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class OBJ_PT_import_include(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Include"
|
||||||
|
bl_parent_id = "FILE_PT_operator"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "IMPORT_SCENE_OT_obj"
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False # No animation.
|
||||||
|
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
layout.prop(operator, 'use_image_search')
|
||||||
|
layout.prop(operator, 'use_smooth_groups')
|
||||||
|
layout.prop(operator, 'use_edges')
|
||||||
|
|
||||||
|
|
||||||
|
class OBJ_PT_import_transform(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Transform"
|
||||||
|
bl_parent_id = "FILE_PT_operator"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "IMPORT_SCENE_OT_obj"
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False # No animation.
|
||||||
|
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
layout.prop(operator, "global_clamp_size")
|
||||||
|
layout.prop(operator, "axis_forward")
|
||||||
|
layout.prop(operator, "axis_up")
|
||||||
|
|
||||||
|
|
||||||
|
class OBJ_PT_import_geometry(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Geometry"
|
||||||
|
bl_parent_id = "FILE_PT_operator"
|
||||||
|
bl_options = {'DEFAULT_CLOSED'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "IMPORT_SCENE_OT_obj"
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
layout.row().prop(operator, "split_mode", expand=True)
|
||||||
|
|
||||||
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False # No animation.
|
||||||
|
|
||||||
|
col = layout.column()
|
||||||
|
if operator.split_mode == 'ON':
|
||||||
|
col.prop(operator, "use_split_objects", text="Split by Object")
|
||||||
|
col.prop(operator, "use_split_groups", text="Split by Group")
|
||||||
|
else:
|
||||||
|
col.prop(operator, "use_groups_as_vgroups")
|
||||||
|
|
||||||
|
|
||||||
|
@orientation_helper(axis_forward='-Z', axis_up='Y')
|
||||||
|
class ExportOBJ(bpy.types.Operator, ExportHelper):
|
||||||
|
"""Save a Wavefront OBJ File"""
|
||||||
|
|
||||||
|
bl_idname = "export_scene.obj"
|
||||||
|
bl_label = 'Export OBJ'
|
||||||
|
bl_options = {'PRESET'}
|
||||||
|
|
||||||
|
filename_ext = ".obj"
|
||||||
|
filter_glob: StringProperty(
|
||||||
|
default="*.obj;*.mtl",
|
||||||
|
options={'HIDDEN'},
|
||||||
|
)
|
||||||
|
|
||||||
|
# context group
|
||||||
|
use_selection: BoolProperty(
|
||||||
|
name="Selection Only",
|
||||||
|
description="Export selected objects only",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
use_animation: BoolProperty(
|
||||||
|
name="Animation",
|
||||||
|
description="Write out an OBJ for each frame",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# object group
|
||||||
|
use_mesh_modifiers: BoolProperty(
|
||||||
|
name="Apply Modifiers",
|
||||||
|
description="Apply modifiers",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
# Non working in Blender 2.8 currently.
|
||||||
|
# ~ use_mesh_modifiers_render: BoolProperty(
|
||||||
|
# ~ name="Use Modifiers Render Settings",
|
||||||
|
# ~ description="Use render settings when applying modifiers to mesh objects",
|
||||||
|
# ~ default=False,
|
||||||
|
# ~ )
|
||||||
|
|
||||||
|
# extra data group
|
||||||
|
use_edges: BoolProperty(
|
||||||
|
name="Include Edges",
|
||||||
|
description="",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
use_smooth_groups: BoolProperty(
|
||||||
|
name="Smooth Groups",
|
||||||
|
description="Write sharp edges as smooth groups",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
use_smooth_groups_bitflags: BoolProperty(
|
||||||
|
name="Bitflag Smooth Groups",
|
||||||
|
description="Same as 'Smooth Groups', but generate smooth groups IDs as bitflags "
|
||||||
|
"(produces at most 32 different smooth groups, usually much less)",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
use_normals: BoolProperty(
|
||||||
|
name="Write Normals",
|
||||||
|
description="Export one normal per vertex and per face, to represent flat faces and sharp edges",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
use_uvs: BoolProperty(
|
||||||
|
name="Include UVs",
|
||||||
|
description="Write out the active UV coordinates",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
use_materials: BoolProperty(
|
||||||
|
name="Write Materials",
|
||||||
|
description="Write out the MTL file",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
use_triangles: BoolProperty(
|
||||||
|
name="Triangulate Faces",
|
||||||
|
description="Convert all faces to triangles",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
use_nurbs: BoolProperty(
|
||||||
|
name="Write Nurbs",
|
||||||
|
description="Write nurbs curves as OBJ nurbs rather than "
|
||||||
|
"converting to geometry",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
use_vertex_groups: BoolProperty(
|
||||||
|
name="Polygroups",
|
||||||
|
description="",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# grouping group
|
||||||
|
use_blen_objects: BoolProperty(
|
||||||
|
name="OBJ Objects",
|
||||||
|
description="Export Blender objects as OBJ objects",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
group_by_object: BoolProperty(
|
||||||
|
name="OBJ Groups",
|
||||||
|
description="Export Blender objects as OBJ groups",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
group_by_material: BoolProperty(
|
||||||
|
name="Material Groups",
|
||||||
|
description="Generate an OBJ group for each part of a geometry using a different material",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
keep_vertex_order: BoolProperty(
|
||||||
|
name="Keep Vertex Order",
|
||||||
|
description="",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
global_scale: FloatProperty(
|
||||||
|
name="Scale",
|
||||||
|
min=0.01, max=1000.0,
|
||||||
|
default=1.0,
|
||||||
|
)
|
||||||
|
|
||||||
|
path_mode: path_reference_mode
|
||||||
|
|
||||||
|
check_extension = True
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
from . import export_obj
|
||||||
|
|
||||||
|
from mathutils import Matrix
|
||||||
|
keywords = self.as_keywords(ignore=("axis_forward",
|
||||||
|
"axis_up",
|
||||||
|
"global_scale",
|
||||||
|
"check_existing",
|
||||||
|
"filter_glob",
|
||||||
|
))
|
||||||
|
|
||||||
|
global_matrix = (Matrix.Scale(self.global_scale, 4) @
|
||||||
|
axis_conversion(to_forward=self.axis_forward,
|
||||||
|
to_up=self.axis_up,
|
||||||
|
).to_4x4())
|
||||||
|
|
||||||
|
keywords["global_matrix"] = global_matrix
|
||||||
|
return export_obj.save(context, **keywords)
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class OBJ_PT_export_include(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Include"
|
||||||
|
bl_parent_id = "FILE_PT_operator"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "EXPORT_SCENE_OT_obj"
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False # No animation.
|
||||||
|
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
col = layout.column(heading="Limit to")
|
||||||
|
col.prop(operator, 'use_selection')
|
||||||
|
|
||||||
|
col = layout.column(heading="Objects as", align=True)
|
||||||
|
col.prop(operator, 'use_blen_objects')
|
||||||
|
col.prop(operator, 'group_by_object')
|
||||||
|
col.prop(operator, 'group_by_material')
|
||||||
|
|
||||||
|
layout.separator()
|
||||||
|
|
||||||
|
layout.prop(operator, 'use_animation')
|
||||||
|
|
||||||
|
|
||||||
|
class OBJ_PT_export_transform(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Transform"
|
||||||
|
bl_parent_id = "FILE_PT_operator"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "EXPORT_SCENE_OT_obj"
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False # No animation.
|
||||||
|
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
layout.prop(operator, 'global_scale')
|
||||||
|
layout.prop(operator, 'path_mode')
|
||||||
|
layout.prop(operator, 'axis_forward')
|
||||||
|
layout.prop(operator, 'axis_up')
|
||||||
|
|
||||||
|
|
||||||
|
class OBJ_PT_export_geometry(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Geometry"
|
||||||
|
bl_parent_id = "FILE_PT_operator"
|
||||||
|
bl_options = {'DEFAULT_CLOSED'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "EXPORT_SCENE_OT_obj"
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False # No animation.
|
||||||
|
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
layout.prop(operator, 'use_mesh_modifiers')
|
||||||
|
# Property definition disabled, not working in 2.8 currently.
|
||||||
|
# layout.prop(operator, 'use_mesh_modifiers_render')
|
||||||
|
layout.prop(operator, 'use_smooth_groups')
|
||||||
|
layout.prop(operator, 'use_smooth_groups_bitflags')
|
||||||
|
layout.prop(operator, 'use_normals')
|
||||||
|
layout.prop(operator, 'use_uvs')
|
||||||
|
layout.prop(operator, 'use_materials')
|
||||||
|
layout.prop(operator, 'use_triangles')
|
||||||
|
layout.prop(operator, 'use_nurbs', text="Curves as NURBS")
|
||||||
|
layout.prop(operator, 'use_vertex_groups')
|
||||||
|
layout.prop(operator, 'keep_vertex_order')
|
||||||
|
|
||||||
|
|
||||||
|
def menu_func_import(self, context):
|
||||||
|
self.layout.operator(ImportOBJ.bl_idname, text="Wavefront (.obj)")
|
||||||
|
|
||||||
|
|
||||||
|
def menu_func_export(self, context):
|
||||||
|
self.layout.operator(ExportOBJ.bl_idname, text="Wavefront (.obj)")
|
||||||
|
|
||||||
|
|
||||||
|
classes = (
|
||||||
|
ImportOBJ,
|
||||||
|
OBJ_PT_import_include,
|
||||||
|
OBJ_PT_import_transform,
|
||||||
|
OBJ_PT_import_geometry,
|
||||||
|
ExportOBJ,
|
||||||
|
OBJ_PT_export_include,
|
||||||
|
OBJ_PT_export_transform,
|
||||||
|
OBJ_PT_export_geometry,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def register():
|
||||||
|
for cls in classes:
|
||||||
|
bpy.utils.register_class(cls)
|
||||||
|
|
||||||
|
bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
|
||||||
|
bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
|
||||||
|
|
||||||
|
|
||||||
|
def unregister():
|
||||||
|
bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
|
||||||
|
bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
|
||||||
|
|
||||||
|
for cls in classes:
|
||||||
|
bpy.utils.unregister_class(cls)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
register()
|
blender/io_scene_obj/export_obj.py (new file, 792 lines)
@@ -0,0 +1,792 @@
|
|||||||
|
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or
|
||||||
|
# modify it under the terms of the GNU General Public License
|
||||||
|
# as published by the Free Software Foundation; either version 2
|
||||||
|
# of the License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program; if not, write to the Free Software Foundation,
|
||||||
|
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||||
|
#
|
||||||
|
# ##### END GPL LICENSE BLOCK #####
|
||||||
|
|
||||||
|
# <pep8 compliant>
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
from mathutils import Matrix, Vector, Color
|
||||||
|
from bpy_extras import io_utils, node_shader_utils
|
||||||
|
|
||||||
|
from bpy_extras.wm_utils.progress_report import (
|
||||||
|
ProgressReport,
|
||||||
|
ProgressReportSubstep,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def name_compat(name):
|
||||||
|
if name is None:
|
||||||
|
return 'None'
|
||||||
|
else:
|
||||||
|
return name.replace(' ', '_')
|
||||||
|
|
||||||
|
|
||||||
|
def mesh_triangulate(me):
|
||||||
|
import bmesh
|
||||||
|
bm = bmesh.new()
|
||||||
|
bm.from_mesh(me)
|
||||||
|
bmesh.ops.triangulate(bm, faces=bm.faces)
|
||||||
|
bm.to_mesh(me)
|
||||||
|
bm.free()
|
||||||
|
|
||||||
|
|
||||||
|
def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
|
||||||
|
source_dir = os.path.dirname(bpy.data.filepath)
|
||||||
|
dest_dir = os.path.dirname(filepath)
|
||||||
|
|
||||||
|
with open(filepath, "w", encoding="utf8", newline="\n") as f:
|
||||||
|
fw = f.write
|
||||||
|
|
||||||
|
fw('# Blender MTL File: %r\n' % (os.path.basename(bpy.data.filepath) or "None"))
|
||||||
|
fw('# Material Count: %i\n' % len(mtl_dict))
|
||||||
|
|
||||||
|
mtl_dict_values = list(mtl_dict.values())
|
||||||
|
mtl_dict_values.sort(key=lambda m: m[0])
|
||||||
|
|
||||||
|
# Write material/image combinations we have used.
|
||||||
|
# Using mtl_dict.values() directly gives un-predictable order.
|
||||||
|
for mtl_mat_name, mat in mtl_dict_values:
|
||||||
|
# Get the Blender data for the material and the image.
|
||||||
|
# Having an image named None will make a bug, dont do it :)
|
||||||
|
|
||||||
|
fw('\nnewmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
|
||||||
|
|
||||||
|
mat_wrap = node_shader_utils.PrincipledBSDFWrapper(mat) if mat else None
|
||||||
|
|
||||||
|
if mat_wrap:
|
||||||
|
use_mirror = mat_wrap.metallic != 0.0
|
||||||
|
use_transparency = mat_wrap.alpha != 1.0
|
||||||
|
|
||||||
|
# XXX Totally empirical conversion, trying to adapt it
|
||||||
|
# (from 1.0 - 0.0 Principled BSDF range to 0.0 - 900.0 OBJ specular exponent range)...
|
||||||
|
spec = (1.0 - mat_wrap.roughness) * 30
|
||||||
|
spec *= spec
|
||||||
|
fw('Ns %.6f\n' % spec)
|
||||||
|
|
||||||
|
# Ambient
|
||||||
|
if use_mirror:
|
||||||
|
fw('Ka %.6f %.6f %.6f\n' % (mat_wrap.metallic, mat_wrap.metallic, mat_wrap.metallic))
|
||||||
|
else:
|
||||||
|
fw('Ka %.6f %.6f %.6f\n' % (1.0, 1.0, 1.0))
|
||||||
|
fw('Kd %.6f %.6f %.6f\n' % mat_wrap.base_color[:3]) # Diffuse
|
||||||
|
# XXX TODO Find a way to handle tint and diffuse color, in a consistent way with import...
|
||||||
|
fw('Ks %.6f %.6f %.6f\n' % (mat_wrap.specular, mat_wrap.specular, mat_wrap.specular)) # Specular
|
||||||
|
# Emission, not in original MTL standard but seems pretty common, see T45766.
|
||||||
|
emission_strength = mat_wrap.emission_strength
|
||||||
|
emission = [emission_strength * c for c in mat_wrap.emission_color[:3]]
|
||||||
|
fw('Ke %.6f %.6f %.6f\n' % tuple(emission))
|
||||||
|
fw('Ni %.6f\n' % mat_wrap.ior) # Refraction index
|
||||||
|
fw('d %.6f\n' % mat_wrap.alpha) # Alpha (obj uses 'd' for dissolve)
|
||||||
|
|
||||||
|
# See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
|
||||||
|
# Note that mapping is rather fuzzy sometimes, trying to do our best here.
|
||||||
|
if mat_wrap.specular == 0:
|
||||||
|
fw('illum 1\n') # no specular.
|
||||||
|
elif use_mirror:
|
||||||
|
if use_transparency:
|
||||||
|
fw('illum 6\n') # Reflection, Transparency, Ray trace
|
||||||
|
else:
|
||||||
|
fw('illum 3\n') # Reflection and Ray trace
|
||||||
|
elif use_transparency:
|
||||||
|
fw('illum 9\n') # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
|
||||||
|
else:
|
||||||
|
fw('illum 2\n') # light normally
|
||||||
|
|
||||||
|
#### And now, the image textures...
|
||||||
|
image_map = {
|
||||||
|
"map_Kd": "base_color_texture",
|
||||||
|
"map_Ka": None, # ambient...
|
||||||
|
"map_Ks": "specular_texture",
|
||||||
|
"map_Ns": "roughness_texture",
|
||||||
|
"map_d": "alpha_texture",
|
||||||
|
"map_Tr": None, # transmission roughness?
|
||||||
|
"map_Bump": "normalmap_texture",
|
||||||
|
"disp": None, # displacement...
|
||||||
|
"refl": "metallic_texture",
|
||||||
|
"map_Ke": "emission_color_texture" if emission_strength != 0.0 else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for key, mat_wrap_key in sorted(image_map.items()):
|
||||||
|
if mat_wrap_key is None:
|
||||||
|
continue
|
||||||
|
tex_wrap = getattr(mat_wrap, mat_wrap_key, None)
|
||||||
|
if tex_wrap is None:
|
||||||
|
continue
|
||||||
|
image = tex_wrap.image
|
||||||
|
if image is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
filepath = io_utils.path_reference(image.filepath, source_dir, dest_dir,
|
||||||
|
path_mode, "", copy_set, image.library)
|
||||||
|
options = []
|
||||||
|
if key == "map_Bump":
|
||||||
|
if mat_wrap.normalmap_strength != 1.0:
|
||||||
|
options.append('-bm %.6f' % mat_wrap.normalmap_strength)
|
||||||
|
if tex_wrap.translation != Vector((0.0, 0.0, 0.0)):
|
||||||
|
options.append('-o %.6f %.6f %.6f' % tex_wrap.translation[:])
|
||||||
|
if tex_wrap.scale != Vector((1.0, 1.0, 1.0)):
|
||||||
|
options.append('-s %.6f %.6f %.6f' % tex_wrap.scale[:])
|
||||||
|
if options:
|
||||||
|
fw('%s %s %s\n' % (key, " ".join(options), repr(filepath)[1:-1]))
|
||||||
|
else:
|
||||||
|
fw('%s %s\n' % (key, repr(filepath)[1:-1]))
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Write a dummy material here?
|
||||||
|
fw('Ns 500\n')
|
||||||
|
fw('Ka 0.8 0.8 0.8\n')
|
||||||
|
fw('Kd 0.8 0.8 0.8\n')
|
||||||
|
fw('Ks 0.8 0.8 0.8\n')
|
||||||
|
fw('d 1\n') # No alpha
|
||||||
|
fw('illum 2\n') # light normally
|
||||||
|
|
||||||
|
|
||||||
|
def test_nurbs_compat(ob):
|
||||||
|
if ob.type != 'CURVE':
|
||||||
|
return False
|
||||||
|
|
||||||
|
for nu in ob.data.splines:
|
||||||
|
if nu.point_count_v == 1 and nu.type != 'BEZIER': # not a surface and not bezier
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def write_nurb(fw, ob, ob_mat):
|
||||||
|
tot_verts = 0
|
||||||
|
cu = ob.data
|
||||||
|
|
||||||
|
# use negative indices
|
||||||
|
for nu in cu.splines:
|
||||||
|
if nu.type == 'POLY':
|
||||||
|
DEG_ORDER_U = 1
|
||||||
|
else:
|
||||||
|
DEG_ORDER_U = nu.order_u - 1 # odd but tested to be correct
|
||||||
|
|
||||||
|
if nu.type == 'BEZIER':
|
||||||
|
print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
|
||||||
|
continue
|
||||||
|
|
||||||
|
if nu.point_count_v > 1:
|
||||||
|
print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
|
||||||
|
continue
|
||||||
|
|
||||||
|
if len(nu.points) <= DEG_ORDER_U:
|
||||||
|
print("\tWarning, order_u is lower then vert count, skipping:", ob.name)
|
||||||
|
continue
|
||||||
|
|
||||||
|
pt_num = 0
|
||||||
|
do_closed = nu.use_cyclic_u
|
||||||
|
do_endpoints = (do_closed == 0) and nu.use_endpoint_u
|
||||||
|
|
||||||
|
for pt in nu.points:
|
||||||
|
fw('v %.6f %.6f %.6f\n' % (ob_mat @ pt.co.to_3d())[:])
|
||||||
|
pt_num += 1
|
||||||
|
tot_verts += pt_num
|
||||||
|
|
||||||
|
fw('g %s\n' % (name_compat(ob.name))) # name_compat(ob.getData(1)) could use the data name too
|
||||||
|
fw('cstype bspline\n') # not ideal, hard coded
|
||||||
|
fw('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still
|
||||||
|
|
||||||
|
curve_ls = [-(i + 1) for i in range(pt_num)]
|
||||||
|
|
||||||
|
# 'curv' keyword
|
||||||
|
if do_closed:
|
||||||
|
if DEG_ORDER_U == 1:
|
||||||
|
pt_num += 1
|
||||||
|
curve_ls.append(-1)
|
||||||
|
else:
|
||||||
|
pt_num += DEG_ORDER_U
|
||||||
|
curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]
|
||||||
|
|
||||||
|
fw('curv 0.0 1.0 %s\n' % (" ".join([str(i) for i in curve_ls]))) # Blender has no U and V values for the curve
|
||||||
|
|
||||||
|
# 'parm' keyword
|
||||||
|
tot_parm = (DEG_ORDER_U + 1) + pt_num
|
||||||
|
tot_parm_div = float(tot_parm - 1)
|
||||||
|
parm_ls = [(i / tot_parm_div) for i in range(tot_parm)]
|
||||||
|
|
||||||
|
if do_endpoints: # end points, force param
|
||||||
|
for i in range(DEG_ORDER_U + 1):
|
||||||
|
parm_ls[i] = 0.0
|
||||||
|
parm_ls[-(1 + i)] = 1.0
|
||||||
|
|
||||||
|
fw("parm u %s\n" % " ".join(["%.6f" % i for i in parm_ls]))
|
||||||
|
|
||||||
|
fw('end\n')
|
||||||
|
|
||||||
|
return tot_verts
|
||||||
|
|
||||||
|
|
||||||
|
def write_file(filepath, objects, depsgraph, scene,
|
||||||
|
EXPORT_TRI=False,
|
||||||
|
EXPORT_EDGES=False,
|
||||||
|
EXPORT_SMOOTH_GROUPS=False,
|
||||||
|
EXPORT_SMOOTH_GROUPS_BITFLAGS=False,
|
||||||
|
EXPORT_NORMALS=False,
|
||||||
|
EXPORT_UV=True,
|
||||||
|
EXPORT_MTL=True,
|
||||||
|
EXPORT_APPLY_MODIFIERS=True,
|
||||||
|
EXPORT_APPLY_MODIFIERS_RENDER=False,
|
||||||
|
EXPORT_BLEN_OBS=True,
|
||||||
|
EXPORT_GROUP_BY_OB=False,
|
||||||
|
EXPORT_GROUP_BY_MAT=False,
|
||||||
|
EXPORT_KEEP_VERT_ORDER=False,
|
||||||
|
EXPORT_POLYGROUPS=False,
|
||||||
|
EXPORT_CURVE_AS_NURBS=True,
|
||||||
|
EXPORT_GLOBAL_MATRIX=None,
|
||||||
|
EXPORT_PATH_MODE='AUTO',
|
||||||
|
progress=ProgressReport(),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Basic write function. The context and options must be already set
|
||||||
|
This can be accessed externaly
|
||||||
|
eg.
|
||||||
|
write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
|
||||||
|
"""
|
||||||
|
if EXPORT_GLOBAL_MATRIX is None:
|
||||||
|
EXPORT_GLOBAL_MATRIX = Matrix()
|
||||||
|
|
||||||
|
def veckey3d(v):
|
||||||
|
return round(v.x, 4), round(v.y, 4), round(v.z, 4)
|
||||||
|
|
||||||
|
def veckey2d(v):
|
||||||
|
return round(v[0], 4), round(v[1], 4)
|
||||||
|
|
||||||
|
def findVertexGroupName(face, vWeightMap):
|
||||||
|
"""
|
||||||
|
Searches the vertexDict to see what groups is assigned to a given face.
|
||||||
|
We use a frequency system in order to sort out the name because a given vertex can
|
||||||
|
belong to two or more groups at the same time. To find the right name for the face
|
||||||
|
we list all the possible vertex group names with their frequency and then sort by
|
||||||
|
frequency in descend order. The top element is the one shared by the highest number
|
||||||
|
of vertices is the face's group
|
||||||
|
"""
|
||||||
|
weightDict = {}
|
||||||
|
for vert_index in face.vertices:
|
||||||
|
vWeights = vWeightMap[vert_index]
|
||||||
|
for vGroupName, weight in vWeights:
|
||||||
|
weightDict[vGroupName] = weightDict.get(vGroupName, 0.0) + weight
|
||||||
|
|
||||||
|
if weightDict:
|
||||||
|
return max((weight, vGroupName) for vGroupName, weight in weightDict.items())[1]
|
||||||
|
else:
|
||||||
|
return '(null)'
|
||||||
|
|
    with ProgressReportSubstep(progress, 2, "OBJ Export path: %r" % filepath, "OBJ Export Finished") as subprogress1:
        with open(filepath, "w", encoding="utf8", newline="\n") as f:
            fw = f.write

            # Write Header
            fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
            fw('# www.blender.org\n')

            # Tell the obj file what material file to use.
            if EXPORT_MTL:
                mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
                # filepath can contain non utf8 chars, use repr
                fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1])

            # Initialize totals, these are updated each object
            totverts = totuvco = totno = 1

            face_vert_index = 1

            # A Dict of Materials
            # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
            mtl_dict = {}
            # Used to reduce the usage of matname_texname materials, which can become annoying in case of
            # repeated exports/imports, yet keeping unique mat names per keys!
            # mtl_name: (material.name, image.name)
            mtl_rev_dict = {}

            copy_set = set()

            # Get all meshes
            subprogress1.enter_substeps(len(objects))
            for i, ob_main in enumerate(objects):
                # ignore dupli children
                if ob_main.parent and ob_main.parent.instance_type in {'VERTS', 'FACES'}:
                    subprogress1.step("Ignoring %s, dupli child..." % ob_main.name)
                    continue

                obs = [(ob_main, ob_main.matrix_world)]
                if ob_main.is_instancer:
                    obs += [(dup.instance_object.original, dup.matrix_world.copy())
                            for dup in depsgraph.object_instances
                            if dup.parent and dup.parent.original == ob_main]
                    # ~ print(ob_main.name, 'has', len(obs) - 1, 'dupli children')

                subprogress1.enter_substeps(len(obs))
                for ob, ob_mat in obs:
                    with ProgressReportSubstep(subprogress1, 6) as subprogress2:
                        uv_unique_count = no_unique_count = 0

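                        # Curves that pass test_nurbs_compat() are exported directly as OBJ
                        # 'curv'/'parm' records by write_nurb() and never go through the mesh path below.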
                        # Nurbs curve support
                        if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
                            ob_mat = EXPORT_GLOBAL_MATRIX @ ob_mat
                            totverts += write_nurb(fw, ob, ob_mat)
                            continue
                        # END NURBS

                        ob_for_convert = ob.evaluated_get(depsgraph) if EXPORT_APPLY_MODIFIERS else ob.original

                        try:
                            me = ob_for_convert.to_mesh()
                        except RuntimeError:
                            me = None

                        if me is None:
                            continue

                        # _must_ do this before applying transformation, else tessellation may differ
                        if EXPORT_TRI:
                            # _must_ do this first since it re-allocs arrays
                            mesh_triangulate(me)

                        me.transform(EXPORT_GLOBAL_MATRIX @ ob_mat)
                        # If negative scaling, we have to invert the normals...
                        if ob_mat.determinant() < 0.0:
                            me.flip_normals()

                        if EXPORT_UV:
                            faceuv = len(me.uv_layers) > 0
                            if faceuv:
                                uv_layer = me.uv_layers.active.data[:]
                        else:
                            faceuv = False

                        me_verts = me.vertices[:]

                        # Make our own list so it can be sorted to reduce context switching
                        face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]

                        if EXPORT_EDGES:
                            edges = me.edges
                        else:
                            edges = []

                        if not (len(face_index_pairs) + len(edges) + len(me.vertices)):  # Make sure there is something to write
                            # clean up
                            ob_for_convert.to_mesh_clear()
                            continue  # don't bother with this mesh.

                        if EXPORT_NORMALS and face_index_pairs:
                            me.calc_normals_split()
                            # No need to call me.free_normals_split later, as this mesh is deleted anyway!

                        loops = me.loops

                        if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
                            smooth_groups, smooth_groups_tot = me.calc_smooth_groups(use_bitflags=EXPORT_SMOOTH_GROUPS_BITFLAGS)
                            if smooth_groups_tot <= 1:
                                smooth_groups, smooth_groups_tot = (), 0
                        else:
                            smooth_groups, smooth_groups_tot = (), 0

                        materials = me.materials[:]
                        material_names = [m.name if m else None for m in materials]

                        # avoid bad index errors
                        if not materials:
                            materials = [None]
                            material_names = [name_compat(None)]

                        # Sort by Material, then images
                        # so we don't over-context-switch in the obj file.
                        if EXPORT_KEEP_VERT_ORDER:
                            pass
                        else:
                            if len(materials) > 1:
                                if smooth_groups:
                                    sort_func = lambda a: (a[0].material_index,
                                                           smooth_groups[a[1]] if a[0].use_smooth else False)
                                else:
                                    sort_func = lambda a: (a[0].material_index,
                                                           a[0].use_smooth)
                            else:
                                # no materials
                                if smooth_groups:
                                    sort_func = lambda a: smooth_groups[a[1]] if a[0].use_smooth else False
                                else:
                                    sort_func = lambda a: a[0].use_smooth

                            face_index_pairs.sort(key=sort_func)

                            del sort_func

                        # Set the default mat to no material and no image.
                        contextMat = 0, 0  # Can never be this, so we will label a new material the first chance we get.
                        contextSmooth = None  # Will either be true or false, set bad to force initialization switch.

                        if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                            name1 = ob.name
                            name2 = ob.data.name
                            if name1 == name2:
                                obnamestring = name_compat(name1)
                            else:
                                obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))

                            if EXPORT_BLEN_OBS:
                                fw('o %s\n' % obnamestring)  # Write Object name
                            else:  # if EXPORT_GROUP_BY_OB:
                                fw('g %s\n' % obnamestring)

                        subprogress2.step()

                        # Vert
                        for v in me_verts:
                            fw('v %.6f %.6f %.6f\n' % v.co[:])

                        subprogress2.step()

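                        # UV coordinates are de-duplicated below: each unique (vertex index, rounded uv)
                        # pair is written once as a 'vt' line, and uv_face_mapping keeps, per face loop,
                        # the index into that table for the 'f' records written later.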
                        # UV
                        if faceuv:
                            # pre-define these so the 'del' below still works if some are never assigned.
                            uv = f_index = uv_index = uv_key = uv_val = uv_ls = None

                            uv_face_mapping = [None] * len(face_index_pairs)

                            uv_dict = {}
                            uv_get = uv_dict.get
                            for f, f_index in face_index_pairs:
                                uv_ls = uv_face_mapping[f_index] = []
                                for uv_index, l_index in enumerate(f.loop_indices):
                                    uv = uv_layer[l_index].uv
                                    # include the vertex index in the key so we don't share UV's between vertices,
                                    # allowed by the OBJ spec but can cause issues for other importers, see: T47010.

                                    # this works too, shared UV's for all verts
                                    #~ uv_key = veckey2d(uv)
                                    uv_key = loops[l_index].vertex_index, veckey2d(uv)

                                    uv_val = uv_get(uv_key)
                                    if uv_val is None:
                                        uv_val = uv_dict[uv_key] = uv_unique_count
                                        fw('vt %.6f %.6f\n' % uv[:])
                                        uv_unique_count += 1
                                    uv_ls.append(uv_val)

                            del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
                            # Only need uv_unique_count and uv_face_mapping

                        subprogress2.step()

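                        # Normals are taken per loop (split normals), rounded through veckey3d() and
                        # de-duplicated so each unique normal is written only once as a 'vn' line;
                        # loops_to_normals maps every loop back to its 'vn' index.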
                        # NORMAL, Smooth/Non smoothed.
                        if EXPORT_NORMALS:
                            no_key = no_val = None
                            normals_to_idx = {}
                            no_get = normals_to_idx.get
                            loops_to_normals = [0] * len(loops)
                            for f, f_index in face_index_pairs:
                                for l_idx in f.loop_indices:
                                    no_key = veckey3d(loops[l_idx].normal)
                                    no_val = no_get(no_key)
                                    if no_val is None:
                                        no_val = normals_to_idx[no_key] = no_unique_count
                                        fw('vn %.4f %.4f %.4f\n' % no_key)
                                        no_unique_count += 1
                                    loops_to_normals[l_idx] = no_val
                            del normals_to_idx, no_get, no_key, no_val
                        else:
                            loops_to_normals = []

                        subprogress2.step()

                        # XXX
                        if EXPORT_POLYGROUPS:
                            # Retrieve the list of vertex groups
                            vertGroupNames = ob.vertex_groups.keys()
                            if vertGroupNames:
                                currentVGroup = ''
                                # For each vertex, build the list of (group name, weight) pairs it belongs to
                                vgroupsMap = [[] for _i in range(len(me_verts))]
                                for v_idx, v_ls in enumerate(vgroupsMap):
                                    v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]

                        for f, f_index in face_index_pairs:
                            f_smooth = f.use_smooth
                            if f_smooth and smooth_groups:
                                f_smooth = smooth_groups[f_index]
                            f_mat = min(f.material_index, len(materials) - 1)

                            # MAKE KEY
                            key = material_names[f_mat], None  # No image, use None instead.

                            # Write the vertex group
                            if EXPORT_POLYGROUPS:
                                if vertGroupNames:
                                    # find which vertex group the face belongs to
                                    vgroup_of_face = findVertexGroupName(f, vgroupsMap)
                                    if vgroup_of_face != currentVGroup:
                                        currentVGroup = vgroup_of_face
                                        fw('g %s\n' % vgroup_of_face)

                            # CHECK FOR CONTEXT SWITCH
                            if key == contextMat:
                                pass  # Context already switched, don't do anything
                            else:
                                if key[0] is None and key[1] is None:
                                    # Write a null material, since we know the context has changed.
                                    if EXPORT_GROUP_BY_MAT:
                                        # can be mat_image or (null)
                                        fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
                                    if EXPORT_MTL:
                                        fw("usemtl (null)\n")  # mat, image

                                else:
                                    mat_data = mtl_dict.get(key)
                                    if not mat_data:
                                        # First add to global dict so we can export to mtl
                                        # Then write mtl

                                        # Make a new name from the mat and image name,
                                        # converting any spaces to underscores with name_compat.

                                        # If there is no image, don't bother adding it to the name.
                                        # Try to avoid as much as possible adding texname (or other things)
                                        # to the mtl name (see [#32102])...
                                        mtl_name = "%s" % name_compat(key[0])
                                        if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
                                            if key[1] is None:
                                                tmp_ext = "_NONE"
                                            else:
                                                tmp_ext = "_%s" % name_compat(key[1])
                                            i = 0
                                            while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
                                                i += 1
                                                tmp_ext = "_%3d" % i
                                            mtl_name += tmp_ext
                                        mat_data = mtl_dict[key] = mtl_name, materials[f_mat]
                                        mtl_rev_dict[mtl_name] = key

                                    if EXPORT_GROUP_BY_MAT:
                                        # can be mat_image or (null)
                                        fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
                                    if EXPORT_MTL:
                                        fw("usemtl %s\n" % mat_data[0])  # can be mat_image or (null)

                            contextMat = key
                            if f_smooth != contextSmooth:
                                if f_smooth:  # was off, now on
                                    if smooth_groups:
                                        f_smooth = smooth_groups[f_index]
                                        fw('s %d\n' % f_smooth)
                                    else:
                                        fw('s 1\n')
                                else:  # was on, now off
                                    fw('s off\n')
                                contextSmooth = f_smooth

                            f_v = [(vi, me_verts[v_idx], l_idx)
                                   for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]

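                            # Each OBJ face record references the three index tables written above,
                            # e.g. 'f 1/1/1 2/2/2 3/3/3' = vertex/uv/normal indices (1-based and
                            # global across all exported objects, hence the running totals below).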
                            fw('f')
                            if faceuv:
                                if EXPORT_NORMALS:
                                    for vi, v, li in f_v:
                                        fw(" %d/%d/%d" % (totverts + v.index,
                                                          totuvco + uv_face_mapping[f_index][vi],
                                                          totno + loops_to_normals[li],
                                                          ))  # vert, uv, normal
                                else:  # No Normals
                                    for vi, v, li in f_v:
                                        fw(" %d/%d" % (totverts + v.index,
                                                       totuvco + uv_face_mapping[f_index][vi],
                                                       ))  # vert, uv

                                face_vert_index += len(f_v)

                            else:  # No UV's
                                if EXPORT_NORMALS:
                                    for vi, v, li in f_v:
                                        fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
                                else:  # No Normals
                                    for vi, v, li in f_v:
                                        fw(" %d" % (totverts + v.index))

                            fw('\n')

                        subprogress2.step()

                        # Write edges.
                        if EXPORT_EDGES:
                            for ed in edges:
                                if ed.is_loose:
                                    fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))

                        # Make the indices global rather than per mesh
                        totverts += len(me_verts)
                        totuvco += uv_unique_count
                        totno += no_unique_count

                        # clean up
                        ob_for_convert.to_mesh_clear()

                subprogress1.leave_substeps("Finished writing geometry of '%s'." % ob_main.name)
            subprogress1.leave_substeps()

        subprogress1.step("Finished exporting geometry, now exporting materials")

        # Now we have all our materials, save them
        if EXPORT_MTL:
            write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)

        # copy all collected files.
        io_utils.path_reference_copy(copy_set)


def _write(context, filepath,
           EXPORT_TRI,  # ok
           EXPORT_EDGES,
           EXPORT_SMOOTH_GROUPS,
           EXPORT_SMOOTH_GROUPS_BITFLAGS,
           EXPORT_NORMALS,  # ok
           EXPORT_UV,  # ok
           EXPORT_MTL,
           EXPORT_APPLY_MODIFIERS,  # ok
           EXPORT_APPLY_MODIFIERS_RENDER,  # ok
           EXPORT_BLEN_OBS,
           EXPORT_GROUP_BY_OB,
           EXPORT_GROUP_BY_MAT,
           EXPORT_KEEP_VERT_ORDER,
           EXPORT_POLYGROUPS,
           EXPORT_CURVE_AS_NURBS,
           EXPORT_SEL_ONLY,  # ok
           EXPORT_ANIMATION,
           EXPORT_GLOBAL_MATRIX,
           EXPORT_PATH_MODE,  # Not used
           ):

    with ProgressReport(context.window_manager) as progress:
        base_name, ext = os.path.splitext(filepath)
        context_name = [base_name, '', '', ext]  # Base name, scene name, frame number, extension

        depsgraph = context.evaluated_depsgraph_get()
        scene = context.scene

        # Exit edit mode before exporting, so current object states are exported properly.
        if bpy.ops.object.mode_set.poll():
            bpy.ops.object.mode_set(mode='OBJECT')

        orig_frame = scene.frame_current

        # Export an animation?
        if EXPORT_ANIMATION:
            scene_frames = range(scene.frame_start, scene.frame_end + 1)  # Up to and including the end frame.
        else:
            scene_frames = [orig_frame]  # Don't export an animation.

        # Loop through all frames in the scene and export.
        progress.enter_substeps(len(scene_frames))
        for frame in scene_frames:
            if EXPORT_ANIMATION:  # Add frame to the filepath.
                context_name[2] = '_%.6d' % frame
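                # With EXPORT_ANIMATION enabled the output becomes '<base>_<frame:06d><ext>',
                # e.g. 'tree1_000001.obj' for frame 1; otherwise the plain filepath is used.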

            scene.frame_set(frame, subframe=0.0)
            if EXPORT_SEL_ONLY:
                objects = context.selected_objects
            else:
                objects = scene.objects

            full_path = ''.join(context_name)

            # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
            # EXPORT THE FILE.
            progress.enter_substeps(1)
            write_file(full_path, objects, depsgraph, scene,
                       EXPORT_TRI,
                       EXPORT_EDGES,
                       EXPORT_SMOOTH_GROUPS,
                       EXPORT_SMOOTH_GROUPS_BITFLAGS,
                       EXPORT_NORMALS,
                       EXPORT_UV,
                       EXPORT_MTL,
                       EXPORT_APPLY_MODIFIERS,
                       EXPORT_APPLY_MODIFIERS_RENDER,
                       EXPORT_BLEN_OBS,
                       EXPORT_GROUP_BY_OB,
                       EXPORT_GROUP_BY_MAT,
                       EXPORT_KEEP_VERT_ORDER,
                       EXPORT_POLYGROUPS,
                       EXPORT_CURVE_AS_NURBS,
                       EXPORT_GLOBAL_MATRIX,
                       EXPORT_PATH_MODE,
                       progress,
                       )
            progress.leave_substeps()

        scene.frame_set(orig_frame, subframe=0.0)
        progress.leave_substeps()


"""
Currently the exporter lacks these features:
* multiple scene export (only active scene is written)
* particles
"""


def save(context,
         filepath,
         *,
         use_triangles=False,
         use_edges=True,
         use_normals=False,
         use_smooth_groups=False,
         use_smooth_groups_bitflags=False,
         use_uvs=True,
         use_materials=True,
         use_mesh_modifiers=True,
         use_mesh_modifiers_render=False,
         use_blen_objects=True,
         group_by_object=False,
         group_by_material=False,
         keep_vertex_order=False,
         use_vertex_groups=False,
         use_nurbs=True,
         use_selection=True,
         use_animation=False,
         global_matrix=None,
         path_mode='AUTO'
         ):

    _write(context, filepath,
           EXPORT_TRI=use_triangles,
           EXPORT_EDGES=use_edges,
           EXPORT_SMOOTH_GROUPS=use_smooth_groups,
           EXPORT_SMOOTH_GROUPS_BITFLAGS=use_smooth_groups_bitflags,
           EXPORT_NORMALS=use_normals,
           EXPORT_UV=use_uvs,
           EXPORT_MTL=use_materials,
           EXPORT_APPLY_MODIFIERS=use_mesh_modifiers,
           EXPORT_APPLY_MODIFIERS_RENDER=use_mesh_modifiers_render,
           EXPORT_BLEN_OBS=use_blen_objects,
           EXPORT_GROUP_BY_OB=group_by_object,
           EXPORT_GROUP_BY_MAT=group_by_material,
           EXPORT_KEEP_VERT_ORDER=keep_vertex_order,
           EXPORT_POLYGROUPS=use_vertex_groups,
           EXPORT_CURVE_AS_NURBS=use_nurbs,
           EXPORT_SEL_ONLY=use_selection,
           EXPORT_ANIMATION=use_animation,
           EXPORT_GLOBAL_MATRIX=global_matrix,
           EXPORT_PATH_MODE=path_mode,
           )

    return {'FINISHED'}
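
# Usage sketch (illustrative, assuming this module is installed as the standard
# io_scene_obj add-on so the operator below is registered):
#
#   import bpy
#   bpy.ops.export_scene.obj(filepath="/tmp/tree1.obj", use_selection=False)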
1323
blender/io_scene_obj/import_obj.py
Normal file
File diff suppressed because it is too large
BIN
samples/resources/emf/tree1.blend
Normal file
Binary file not shown.
3
samples/resources/emf/tree1.emf
Normal file
@ -0,0 +1,3 @@
EMF(STRING)
# Blender v2.92.0 EMF File: 'tree1.blend'

2
samples/resources/emf/tree1.mtl
Normal file
@ -0,0 +1,2 @@
# Blender MTL File: 'tree1.blend'
# Material Count: 0
3
samples/resources/emf/tree1.obj
Normal file
@ -0,0 +1,3 @@
# Blender v2.92.0 OBJ File: 'tree1.blend'
# www.blender.org
mtllib tree1.mtl
59
samples/resources/emf/tree1_base.emf
Normal file
File diff suppressed because one or more lines are too long