mirror of https://github.com/AxioDL/metaforce.git

MAPA/MAPU bug fixes

parent 87ae8aaa57
commit ed5085e42b

@@ -1,7 +1,8 @@
-import bpy, struct
+import bpy, struct, bmesh
 from . import hmdl
 from mathutils import Vector
 VertPool = hmdl.HMDLMesh.VertPool
+strip_next_loop = hmdl.HMDLMesh.strip_next_loop
 
 def cook(writebuf, mesh_obj):
     if mesh_obj.type != 'MESH':
@@ -98,10 +99,14 @@ def cook(writebuf, mesh_obj):
             if edge.seam:
                 edge_set.add(edge)
 
-        trace_edge = edge_set.pop()
+        if len(edge_set):
+            trace_edge = edge_set.pop()
+        else:
+            trace_edge = None
+        edge_ranges = []
         edge_iter = loop_iter + loop_count
-        edge_count = 0
-        if trace_edge:
+        while trace_edge:
+            edge_count = 0
             vert_pool.vert_out_map(writebuf, trace_edge.verts[0])
             vert_pool.vert_out_map(writebuf, trace_edge.verts[1])
             edge_count += 2
@@ -117,18 +122,24 @@ def cook(writebuf, mesh_obj):
                     edge_count += 1
                     found_edge = True
                     break
+            if len(edge_set):
+                trace_edge = edge_set.pop()
+            else:
+                trace_edge = None
+            edge_ranges.append((edge_iter, edge_count))
+            edge_iter += edge_count
 
         pos_avg = Vector()
         norm_avg = Vector()
         if len(loop_set):
             for loop in loop_set:
-                pos_avg += loop.co
-                norm_avg += loop.normal
+                pos_avg += loop.vert.co
+                norm_avg += loop.vert.normal
             pos_avg /= len(loop_set)
             norm_avg /= len(loop_set)
             norm_avg.normalize()
 
-        loop_ranges.append((loop_iter, loop_count, edge_iter, edge_count, pos_avg, norm_avg))
+        loop_ranges.append((loop_iter, loop_count, edge_ranges, pos_avg, norm_avg))
         loop_iter += loop_count + edge_count
 
     # No more surfaces
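
The reworked block above no longer assumes one contiguous border per surface: seam edges are popped from edge_set and traced into as many runs as actually exist, each run recorded in edge_ranges as a (start, count) pair, and loop positions/normals are now taken through loop.vert since a bmesh loop carries them on its vertex. The following stand-alone sketch mirrors that bookkeeping on plain vertex-id pairs; it needs no bpy/bmesh, and trace_edge_ranges / write_index are illustrative names, not part of the addon:

    def trace_edge_ranges(edges, write_index):
        # Group undirected edges (vertex-id pairs) into runs of connected
        # indices, mirroring the edge_set / edge_ranges bookkeeping above.
        edge_set = set(edges)
        edge_ranges = []                 # (start, count) into the index stream
        edge_iter = 0
        while edge_set:
            a, b = edge_set.pop()
            run_verts = {a, b}
            write_index(a); write_index(b)
            edge_count = 2
            found = True
            while found:                 # grow the run while a remaining edge touches it
                found = False
                for e in list(edge_set):
                    if e[0] in run_verts or e[1] in run_verts:
                        edge_set.remove(e)
                        nxt = e[1] if e[0] in run_verts else e[0]
                        run_verts.add(nxt)
                        write_index(nxt)
                        edge_count += 1
                        found = True
                        break
            edge_ranges.append((edge_iter, edge_count))
            edge_iter += edge_count
        return edge_ranges

    out = []
    print(trace_edge_ranges({(0, 1), (1, 2), (5, 6)}, out.append))
    # two runs -- one of 3 indices, one of 2; which comes first depends on set pop order

Each returned tuple is an offset into the emitted index stream plus the number of indices in that border run, which is exactly what the new serialization below writes out.
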
@@ -137,9 +148,12 @@ def cook(writebuf, mesh_obj):
     # Write out loop ranges and averages
     writebuf(struct.pack('I', len(loop_ranges)))
     for loop_range in loop_ranges:
+        writebuf(struct.pack('fff', loop_range[3][0], loop_range[3][1], loop_range[3][2]))
         writebuf(struct.pack('fff', loop_range[4][0], loop_range[4][1], loop_range[4][2]))
-        writebuf(struct.pack('fff', loop_range[5][0], loop_range[5][1], loop_range[5][2]))
-        writebuf(struct.pack('IIII', loop_range[0], loop_range[1], loop_range[2], loop_range[3]))
+        writebuf(struct.pack('II', loop_range[0], loop_range[1]))
+        writebuf(struct.pack('I', len(loop_range[2])))
+        for edge_range in loop_range[2]:
+            writebuf(struct.pack('II', edge_range[0], edge_range[1]))
 
     # Write out mappable objects
     poi_count = 0
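
With loop_range now shaped as (loop_iter, loop_count, edge_ranges, pos_avg, norm_avg), the per-surface record becomes: averaged position, averaged normal, the loop start/count pair, then a border-run count followed by one (start, count) pair per run. A minimal stand-alone sketch of that layout, using only struct (the values are invented and writebuf is just a bytearray here):

    import struct

    out = bytearray()
    writebuf = out.extend

    # hypothetical per-surface values
    pos_avg = (1.0, 2.0, 3.0)
    norm_avg = (0.0, 0.0, 1.0)
    loop_start, loop_count = 0, 12
    edge_ranges = [(12, 5), (17, 4)]

    writebuf(struct.pack('fff', *pos_avg))               # averaged position (centerOfMass)
    writebuf(struct.pack('fff', *norm_avg))              # averaged normal
    writebuf(struct.pack('II', loop_start, loop_count))  # loop index range
    writebuf(struct.pack('I', len(edge_ranges)))         # number of border runs
    for start, count in edge_ranges:
        writebuf(struct.pack('II', start, count))

    print(len(out))   # 52 bytes for this surface

The C++ reader further down consumes the same fields in the same order.
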
@@ -151,7 +165,7 @@ def cook(writebuf, mesh_obj):
     for obj in bpy.context.scene.objects:
         if obj.retro_mappable_type != -1:
             writebuf(struct.pack('III',
-                obj.retro_mappable_type, obj.retro_mappable_unk, obj.retro_mappable_sclyid))
+                obj.retro_mappable_type, obj.retro_mappable_unk, int(obj.retro_mappable_sclyid, 0)))
             writebuf(struct.pack('ffffffffffffffff',
                 obj.matrix_world[0][0], obj.matrix_world[0][1], obj.matrix_world[0][2], obj.matrix_world[0][3],
                 obj.matrix_world[1][0], obj.matrix_world[1][1], obj.matrix_world[1][2], obj.matrix_world[1][3],
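
The sclyid change assumes retro_mappable_sclyid is stored as a string property; passing base 0 to int() lets Python infer the base from the literal's prefix, so hex-formatted SCLY instance IDs parse correctly. A quick illustration (the IDs are made up):

    # int(s, 0) infers the base from the prefix
    print(int('0x0008002A', 0))   # 524330 -- hex-style SCLY instance id
    print(int('42', 0))           # 42     -- plain decimal still parses
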
@@ -1,10 +1,10 @@
-import bpy
+import bpy, os, struct
 
 def cook(writebuf):
     found_lib = False
     for obj in bpy.context.scene.objects:
-        if obj.library:
-            path = os.path.normpath(bpy.path.abspath(obj.library.filepath))
+        if obj.data and obj.data.library:
+            path = os.path.normpath(bpy.path.abspath(obj.data.library.filepath))
             writebuf(struct.pack('I', len(path)))
             writebuf(path.encode())
             found_lib = True
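
The switch from obj.library to obj.data.library matters because in Blender an object's library attribute is only set when the object datablock itself was linked from another .blend; a local object that merely uses linked mesh data carries the link on the data block instead, which is the case these hexagon segments hit. A small sketch of the check, meant to be run inside Blender (it prints rather than packs, to stay self-contained):

    import bpy, os

    for obj in bpy.context.scene.objects:
        # obj.data is None for empties, so guard before touching .library
        if obj.data and obj.data.library:
            path = os.path.normpath(bpy.path.abspath(obj.data.library.filepath))
            print(obj.name, '->', path)
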
@@ -12,6 +12,12 @@ def cook(writebuf):
     if not found_lib:
         raise RuntimeError('No hexagon segments present')
 
+    world_count = 0
+    for obj in bpy.context.scene.objects:
+        if not obj.parent and obj.type == 'EMPTY':
+            world_count += 1
+    writebuf(struct.pack('I', world_count))
+
     for obj in bpy.context.scene.objects:
         if not obj.parent and obj.type == 'EMPTY':
             writebuf(struct.pack('I', len(obj.name)))
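
Because the count has to precede the per-world records in the stream, the new code makes a counting pass over the scene before the existing writing pass. An equivalent single-pass variant would collect the matching empties first; a sketch under the same selection criteria, with out/writebuf standing in for the real writer:

    import bpy, struct

    out = bytearray()
    writebuf = out.extend

    # gather top-level empties once, then emit the count followed by the records
    worlds = [obj for obj in bpy.context.scene.objects
              if not obj.parent and obj.type == 'EMPTY']
    writebuf(struct.pack('I', len(worlds)))
    for obj in worlds:
        writebuf(struct.pack('I', len(obj.name)))   # name length, as in the hunk above

Either form is fine at scene sizes; the committed two-pass version just avoids building the intermediate list.
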
@@ -1 +1 @@
-Subproject commit 5df07a203939f382f66238e7d3cec417f9ec0b3e
+Subproject commit f3226c6102a3f25d603f8b7ff6dcd8b358b59636

@@ -726,8 +726,7 @@ public:
             Vector3f centerOfMass;
             Index start;
             Index count;
-            Index borderStart;
-            Index borderCount;
+            std::vector<std::pair<Index, Index>> borders;
             Surface(BlenderConnection& conn);
         };
         std::vector<Surface> surfaces;
@@ -1098,7 +1098,17 @@ BlenderConnection::DataStream::MapArea::Surface::Surface(BlenderConnection& conn
 {
     centerOfMass.read(conn);
     normal.read(conn);
-    conn._readBuf(&start, 16);
+    conn._readBuf(&start, 8);
+
+    uint32_t borderCount;
+    conn._readBuf(&borderCount, 4);
+    borders.reserve(borderCount);
+    for (int i=0 ; i<borderCount ; ++i)
+    {
+        borders.emplace_back();
+        std::pair<Index, Index>& idx = borders.back();
+        conn._readBuf(&idx, 8);
+    }
 }
 
 BlenderConnection::DataStream::MapArea::POI::POI(BlenderConnection& conn)
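
Reading 8 bytes into start covers the two adjacent 32-bit Index members (start and count) that remain after borderStart/borderCount were replaced by the borders vector; the border runs are then read as one 32-bit count plus an 8-byte (start, count) pair each, mirroring the Python writer above. A stand-alone size check of that record layout in Python (field names follow the C++ members; the bytes are invented):

    import struct

    def read_surface(buf, off=0):
        center = struct.unpack_from('fff', buf, off); off += 12
        normal = struct.unpack_from('fff', buf, off); off += 12
        start, count = struct.unpack_from('II', buf, off); off += 8   # _readBuf(&start, 8)
        border_count, = struct.unpack_from('I', buf, off); off += 4
        borders = [struct.unpack_from('II', buf, off + 8*i) for i in range(border_count)]
        return (center, normal, start, count, borders), off + 8*border_count

    record = (struct.pack('ffffff', 1.0, 2.0, 3.0, 0.0, 0.0, 1.0) +
              struct.pack('IIIIIII', 0, 12, 2, 12, 5, 17, 4))
    surface, size = read_surface(record)
    print(size)   # 52 bytes: 2*12 + 8 + 4 + 2*8
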
@@ -1165,9 +1175,7 @@ BlenderConnection::DataStream::MapUniverse::World::World(BlenderConnection& conn
        conn._readBuf(&path[0], pathLen);
 
        hecl::SystemStringView sysPath(path);
-       SystemString pathRel =
-           conn.m_loadedBlend.getProject().getProjectRootPath().getProjectRelativeFromAbsolute(sysPath.sys_str());
-       worldPath.assign(conn.m_loadedBlend.getProject().getProjectWorkingPath(), pathRel);
+       worldPath.assign(conn.m_loadedBlend.getProject().getProjectWorkingPath(), sysPath.sys_str());
     }
 }
 