1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
|
#!BPY
"""
Name: 'LightWave + Materials (.lwo)...'
Blender: 237
Group: 'Import'
Tooltip: 'Import LightWave Object File Format (.lwo)'
"""
__author__ = "Alessandro Pirovano, Anthony D'Agostino (Scorpius)"
__url__ = ("blender", "elysiun",
           "Author's homepage, http://www.redrival.com/scorpius", "Author's homepage, http://uaraus.altervista.org")
importername = "lwo_import 0.1.16" #version string echoed into the import log by read()
# +---------------------------------------------------------+
# | Save your work before and after use. |
# | Please report any useful comment to: |
# | uaraus-dem@yahoo.it |
# | Thanks |
# +---------------------------------------------------------+
# +---------------------------------------------------------+
# | Copyright (c) 2002 Anthony D'Agostino |
# | http://www.redrival.com/scorpius |
# | scorpius@netzero.com |
# | April 21, 2002 |
# | Released under the Blender Artistic Licence (BAL) |
# | Import Export Suite v0.5 |
# +---------------------------------------------------------+
# | Read and write LightWave Object File Format (*.lwo) |
# +---------------------------------------------------------+
# +---------------------------------------------------------+
# | Alessandro Pirovano tweaked starting on March 2005 |
# | http://uaraus.altervista.org |
# +---------------------------------------------------------+
# +---------------------------------------------------------+
# | Release log: |
# | 0.1.16: fixed (try 2) texture offset calculations |
# | added hint on axis mapping |
# | added hint on texture blending mode |
# | added hint on texture transparency setting |
# | search images in original directory first |
# | fixed texture order application |
# | 0.1.15: added release log |
# | fixed texture offset calculations (non-UV) |
# | fixed reverting vertex order in face generation |
# | associate texture on game-engine settings |
# | vector math definitely based on mathutils |
# | search images in "Images" and "../Images" dir |
# | revised logging facility |
# | fixed subsurf texture and material mappings |
# | 0.1.14: patched missing mod_vector (not definitive) |
# | 0.1.13: first public release |
# +---------------------------------------------------------+
#blender related import
import Blender
#iosuite related import
try: #new naming
import meshtools as my_meshtools
except ImportError: #fallback to the old one
print "using old mod_meshtools"
import mod_meshtools as my_meshtools
#python specific modules import
import struct, chunk, os, cStringIO, time, operator, copy
# ===========================================================
# === Utility Preamble ======================================
# ===========================================================
textname = "lwo_log" #name of the Blender Text buffer that receives the import log
#uncomment the following line to disable logging facility
#textname = None
# ===========================================================
class dotext:
	"""Logging helper: mirrors script messages to a Blender Text buffer
	and (optionally) to the console."""
	#destination selectors for pstring/pprint
	_NO = 0 #use internal to class only
	LOG = 1 #write only to LOG
	CON = 2 #write to both LOG and CONSOLE
	def __init__(self, tname, where=LOG):
		#tname: name of the Blender Text buffer to log into (None disables buffer logging)
		#where: default destination used when callers pass _NO
		self.dwhere = where #defaults on console only
		if (tname==None):
			print "*** not using text object to log script"
			self.txtobj = None
			return
		#reuse (and clear) an existing buffer with that name, else create one
		tlist = Blender.Text.get()
		for i in range(len(tlist)):
			if (tlist[i].getName()==tname):
				tlist[i].clear()
				#print tname, " text object found and cleared!"
				self.txtobj = tlist[i]
				return
		#print tname, " text object not found and created!"
		self.txtobj = Blender.Text.New(tname)
	# end def __init__
	def write(self, wstring, maxlen=100):
		#append wstring to the text buffer, hard-wrapping every maxlen characters
		if (self.txtobj==None): return
		while (1):
			ll = len(wstring)
			if (ll>maxlen):
				self.txtobj.write((wstring[:maxlen]))
				self.txtobj.write("\n")
				wstring = (wstring[maxlen:])
			else:
				self.txtobj.write(wstring)
				break
	# end def write
	def pstring(self, ppstring, where = _NO):
		#log one message; _NO means "use the instance default destination";
		#the buffer always gets the message, the console only for CON
		if where == dotext._NO: where = self.dwhere
		if where == dotext.CON:
			print ppstring
		self.write(ppstring)
		self.write("\n")
	# end def pstring
	def plist(self, pplist, where = _NO):
		#log a list, one "[index] -> value" line per element
		self.pprint ("list:[")
		for pp in range(len(pplist)):
			self.pprint ("[%d] -> %s" % (pp, pplist[pp]), where)
		self.pprint ("]")
	# end def plist
	def pdict(self, pdict, where = _NO):
		#log a dict, one "[key] -> value" line per entry
		self.pprint ("dict:{", where)
		for pp in pdict.keys():
			self.pprint ("[%s] -> %s" % (pp, pdict[pp]), where)
		self.pprint ("}")
	# end def pdict
	def pprint(self, parg, where = _NO):
		#dispatch on argument type: lists and dicts get structured output
		if parg == None:
			self.pstring("_None_", where)
		elif type(parg) == type ([]):
			self.plist(parg, where)
		elif type(parg) == type ({}):
			self.pdict(parg, where)
		else:
			self.pstring(parg, where)
	# end def pprint
	def logcon(self, parg):
		#force output to both console and log buffer
		self.pprint(parg, dotext.CON)
	# end def logcon
# endclass dotext
tobj=dotext(textname) #module-wide logger instance used by every read_* routine below
#uncomment the following line to log all messages on both console and logfile
#tobj=dotext(textname,dotext.CON)
# ===========================================================
# === Main read functions ===================================
# ===========================================================
# =============================
# === Read LightWave Format ===
# =============================
def read(filename):
	"""Entry point: open an .lwo file and dispatch on the IFF FORM type
	(LWOB = LightWave 5.5, LWO2 = LightWave 6.0+), then report timing."""
	global tobj
	tobj.logcon ("#####################################################################")
	tobj.logcon ("This is: %s" % importername)
	tobj.logcon ("Importing file:")
	tobj.logcon (filename)
	tobj.pprint ("#####################################################################")
	start = time.clock()
	file = open(filename, "rb")
	# === LWO header ===
	#IFF FORM header: 4-byte id, 4-byte big-endian size, 4-byte form type
	form_id, form_size, form_type = struct.unpack(">4s1L4s", file.read(12))
	if (form_type == "LWOB"):
		read_lwob(file, filename)
	elif (form_type == "LWO2"):
		read_lwo2(file, filename)
	else:
		#NOTE(review): this early return skips file.close() below, leaking the handle
		tobj.logcon ("Can't read a file with the form_type: %s" %form_type)
		return
	Blender.Window.DrawProgressBar(1.0, "") # clear progressbar
	file.close()
	end = time.clock()
	seconds = " in %.2f %s" % (end-start, "seconds")
	#form_type is necessarily LWOB or LWO2 here (else branch returned), so fmt is always bound
	if form_type == "LWO2": fmt = " (v6.0 Format)"
	if form_type == "LWOB": fmt = " (v5.5 Format)"
	message = "Successfully imported " + os.path.basename(filename) + fmt + seconds
	#my_meshtools.print_boxed(message)
	tobj.pprint ("#####################################################################")
	tobj.logcon (message)
	tobj.logcon ("#####################################################################")
# enddef read
# =================================
# === Read LightWave 5.5 format ===
# =================================
def read_lwob(file, filename):
	"""Read a LightWave 5.5 (LWOB) file: collect vertices, then build the
	Blender mesh as soon as a POLS chunk is encountered."""
	global tobj
	tobj.logcon("LightWave 5.5 format")
	objname = os.path.splitext(os.path.basename(filename))[0] #default name from file name
	while 1:
		try:
			lwochunk = chunk.Chunk(file)
		except EOFError:
			break
		if lwochunk.chunkname == "LAYR":
			objname = read_layr(lwochunk)
		elif lwochunk.chunkname == "PNTS": # Verts
			verts = read_verts(lwochunk)
		elif lwochunk.chunkname == "POLS": # Faces v5.5
			faces = read_faces_5(lwochunk)
			#NOTE(review): assumes PNTS always precedes POLS in the file;
			#otherwise verts is unbound here and this raises NameError
			my_meshtools.create_mesh(verts, faces, objname)
		else: # Misc Chunks
			lwochunk.skip()
	return
# enddef read_lwob
# =============================
# === Read LightWave Format ===
# =============================
def read_lwo2(file, filename, typ="LWO2"):
	"""Read a LightWave 6+ (LWO2) file.

	Parses every chunk into a per-layer object_list, then rebuilds faces
	(removing fan decomposition), creates Blender objects and, when
	surfaces were read, materials. typ is unused in the body; the default
	documents the expected form type.
	"""
	global tobj
	tobj.logcon("LightWave 6 (and above) format")
	dir_part = Blender.sys.dirname(filename)
	fname_part = Blender.sys.basename(filename)
	#first initialization of data structures
	defaultname = os.path.splitext(fname_part)[0]
	tag_list = [] #tag list: global for the whole file?
	surf_list = [] #surf list: global for the whole file?
	clip_list = [] #clip list: global for the whole file?
	object_index = 0
	object_list = None
	# init value is: object_list = [[None, {}, [], [], {}, {}, 0, {}, {}]]
	#0 - objname #original name
	#1 - obj_dict = {TAG} #objects created
	#2 - verts = [] #object vertexes
	#3 - faces = [] #object faces (associations poly -> vertexes)
	#4 - obj_dim_dict = {TAG} #tuples size and pos in local object coords - used for NON-UV mappings
	#5 - polytag_dict = {TAG} #tag to polygon mapping
	#6 - patch_flag #0 = surf; 1 = patch (subdivision surface) - it was the image list
	#7 - uvcoords_dict = {name} #uvmap coordinates (mixed mode per face/per vertex)
	#8 - facesuv_dict = {name} #uvmap coordinates associations poly -> uv tuples
	while 1:
		try:
			lwochunk = chunk.Chunk(file)
		except EOFError:
			break
		tobj.pprint(" ")
		if lwochunk.chunkname == "LAYR":
			tobj.pprint("---- LAYR")
			objname = read_layr(lwochunk)
			tobj.pprint(objname)
			if object_list == None:
				object_list = [[objname, {}, [], [], {}, {}, 0, {}, {}]]
			else:
				object_list.append([objname, {}, [], [], {}, {}, 0, {}, {}])
				object_index += 1
		elif lwochunk.chunkname == "PNTS": # Verts
			tobj.pprint("---- PNTS")
			verts = read_verts(lwochunk)
			object_list[object_index][2] = verts
		elif lwochunk.chunkname == "VMAP": # MAPS (UV)
			tobj.pprint("---- VMAP")
			object_list[object_index][7], object_list[object_index][8] = read_vmap(object_list[object_index][7], object_list[object_index][8], object_list[object_index][3], len(object_list[object_index][2]), lwochunk)
		elif lwochunk.chunkname == "VMAD": # MAPS (UV) per-face
			tobj.pprint("---- VMAD")
			object_list[object_index][7], object_list[object_index][8] = read_vmad(object_list[object_index][7], object_list[object_index][8], object_list[object_index][3], len(object_list[object_index][2]), lwochunk)
		elif lwochunk.chunkname == "POLS": # Faces v6.0
			tobj.pprint("-------- POLS(6)")
			faces, flag = read_faces_6(lwochunk)
			#flag is 0 for regular polygon, 1 for patches (= subsurf), 2 for anything else to be ignored
			if flag<2:
				if object_list[object_index][3] != []:
					#second POLS for the same layer: clone the entry so each
					#entry carries exactly one face list
					object_list.append([object_list[object_index][0], #update name
										{}, #init
										copy.deepcopy(object_list[object_index][2]), #same vertexes
										[], #no faces
										{}, #no need to copy - filled at runtime
										{}, #polygon tagging will follow
										flag, #patch flag
										copy.deepcopy(object_list[object_index][7]), #same uvcoords
										{}]) #no uv mapping
					object_index += 1
				#end if already has a face list
				#update uv coords mapping if VMAP already encountered
				for uvname in object_list[object_index][7]:
					tobj.pprint("updating uv to face mapping for %s" % uvname)
					object_list[object_index][8][uvname] = copy.deepcopy(faces)
				object_list[object_index][3] = faces
				objname = object_list[object_index][0]
				if objname == None:
					objname = defaultname
			#end if processing a valid poly type
		elif lwochunk.chunkname == "TAGS": # Tags
			tobj.pprint("---- TAGS")
			tag_list.extend(read_tags(lwochunk))
		elif lwochunk.chunkname == "PTAG": # PTags
			tobj.pprint("---- PTAG")
			polytag_dict = read_ptags(lwochunk, tag_list)
			for kk in polytag_dict.keys(): object_list[object_index][5][kk] = polytag_dict[kk]
		elif lwochunk.chunkname == "SURF": # surfaces
			tobj.pprint("---- SURF")
			surf_list.append(read_surfs(lwochunk, surf_list, tag_list))
		elif lwochunk.chunkname == "CLIP": # texture images
			tobj.pprint("---- CLIP")
			clip_list.append(read_clip(lwochunk))
			tobj.pprint("read total %s clips" % len(clip_list))
		else: # Misc Chunks
			tobj.pprint("---- %s: skipping" % lwochunk.chunkname)
			lwochunk.skip()
	#uncomment here to log data structure as it is built
	#tobj.pprint(object_list)
	#second pass: build Blender data from the collected chunks
	tobj.pprint ("\n#####################################################################")
	tobj.pprint("Found %d objects:" % len(object_list))
	tobj.pprint ("#####################################################################")
	for objspec_list in object_list:
		tobj.pprint ("\n#===================================================================#")
		tobj.pprint("Processing Object: %s" % objspec_list[0])
		tobj.pprint ("#===================================================================#")
		objspec_list[3], objspec_list[5], objspec_list[8] = recalc_faces(objspec_list[2], objspec_list[3], objspec_list[5], objspec_list[8]) #recalculate faces, polytag_dict and uv_mapping get rid of faces fanning
		create_objects(objspec_list)
		if surf_list != []:
			create_material(clip_list, surf_list, objspec_list, dir_part) #give it all the object
	return
# enddef read_lwo2
# ===========================================================
# === File reading routines =================================
# ===========================================================
# ==================
# === Read Verts ===
# ==================
def read_verts(lwochunk):
	"""Read a PNTS chunk: 12 bytes per vertex, three big-endian floats.
	Stored as (x, z, y) -- the Y/Z swap maps LightWave's axes onto Blender's."""
	global tobj
	data = cStringIO.StringIO(lwochunk.read())
	numverts = lwochunk.chunksize/12 #Python 2 integer division; chunksize is a multiple of 12
	#$verts = []
	verts = [None] * numverts #pre-sized, filled by index below
	for i in range(numverts):
		if not i%100 and my_meshtools.show_progress:
			Blender.Window.DrawProgressBar(float(i)/numverts, "Reading Verts")
		x, y, z = struct.unpack(">fff", data.read(12))
		verts[i] = (x, z, y) #axis swap: see docstring
	#NOTE(review): if numverts == 0 the loop never runs and i is unbound here (NameError)
	tobj.pprint("read %d vertexes" % (i+1))
	return verts
# enddef read_verts
# =================
# === Read Name ===
# =================
# modified to deal with odd length strings
def read_name(file):
	"""Read a NUL-terminated string from *file* and return (string, bytes consumed).

	LightWave strings are padded to an even byte count, so when the
	string plus its terminator has odd length one extra pad byte is
	consumed and counted.
	"""
	chars = []
	while 1:
		c = file.read(1)
		if c == "\0":
			break
		chars.append(c)
	result = "".join(chars)
	consumed = len(result) + 1 #the NUL terminator counts too
	if consumed % 2:
		file.read(1) #swallow the pad byte that rounds the field up to even length
		consumed += 1
	return result, consumed
# ==================
# === Read Layer ===
# ==================
def read_layr(lwochunk):
	"""Parse a LAYR chunk and return the layer name ("NoName" when empty).

	The layer number, flags and pivot point are decoded but not used.
	"""
	data = cStringIO.StringIO(lwochunk.read())
	layer_number, layer_flags = struct.unpack(">hh", data.read(4))
	layer_pivot = struct.unpack(">fff", data.read(12))
	name, bytes_read = read_name(data)
	if not name:
		name = "NoName"
	return name
# enddef read_layr
# ======================
# === Read Faces 5.5 ===
# ======================
def read_faces_5(lwochunk):
	"""Read a LWOB (v5.5) POLS chunk.

	Each polygon record is: vertex count (U2), that many vertex indices
	(U2 each), then a surface number (SIGNED I2 -- negative values flag
	detail polygons in the LWOB format). Returns a list of faces, each a
	list of vertex indices reversed so the winding matches Blender's,
	or None when a negative surface index is encountered.
	"""
	data = cStringIO.StringIO(lwochunk.read())
	faces = []
	i = 0
	while i < lwochunk.chunksize:
		if not i%100 and my_meshtools.show_progress:
			Blender.Window.DrawProgressBar(float(i)/lwochunk.chunksize, "Reading Faces")
		facev = []
		numfaceverts, = struct.unpack(">H", data.read(2))
		for j in range(numfaceverts):
			index, = struct.unpack(">H", data.read(2))
			facev.append(index)
		facev.reverse() #invert vertex order for Blender's face winding
		faces.append(facev)
		#BUGFIX: the surface number is signed in LWOB; the original ">H"
		#unsigned read made the < 0 check below unreachable dead code
		surfaceindex, = struct.unpack(">h", data.read(2))
		if surfaceindex < 0:
			tobj.logcon ("***Error. Referencing uncorrect surface index")
			return
		i += (4+numfaceverts*2)
	return faces
# ==================================
# === Read Variable-Length Index ===
# ==================================
def read_vx(data):
	"""Decode a LightWave variable-length index (VX).

	Normally two bytes (16-bit big-endian); when the first byte is 0xFF
	the index is the following 24-bit big-endian value (four bytes total).
	Returns (index, number of bytes consumed).
	"""
	hi, = struct.unpack(">B", data.read(1))
	if hi == 0xFF:
		#4-byte form: 0xFF marker followed by a 24-bit value
		b2, b3, b4 = struct.unpack(">3B", data.read(3))
		return (b2 << 16) + (b3 << 8) + b4, 4
	#2-byte form
	lo, = struct.unpack(">B", data.read(1))
	return (hi << 8) + lo, 2
# ======================
# === Read uvmapping ===
# ======================
def read_vmap(uvcoords_dict, facesuv_dict, faces, maxvertnum, lwochunk):
	"""Read a VMAP chunk (per-vertex UV coordinates).

	Adds the map's uv list to uvcoords_dict under its name; facesuv_dict
	gets a copy of faces (uv tuples are vertex-ordered) or [] when faces
	are not read yet. Non-TXUV maps are skipped. Returns the two dicts.
	"""
	if maxvertnum == 0:
		tobj.pprint ("Found VMAP but no vertexes to map!")
		return uvcoords_dict, facesuv_dict
	data = cStringIO.StringIO(lwochunk.read())
	map_type = data.read(4)
	if map_type != "TXUV":
		tobj.pprint ("Reading VMAP: No Texture UV map Were Found. Map Type: %s" % map_type)
		return uvcoords_dict, facesuv_dict
	dimension, = struct.unpack(">H", data.read(2))
	name, i = read_name(data) #i initialized with string lenght + zeros
	tobj.pprint ("TXUV %d %s" % (dimension, name))
	#my_uv_list = [None] * maxvertnum
	my_uv_list = [(0.0, 0.0)] * maxvertnum #more safe to have some default coordinates to associate in any case?
	while (i < lwochunk.chunksize - 6): #4+2 header bytes already read
		vertnum, vnum_size = read_vx(data)
		u, v = struct.unpack(">ff", data.read(8))
		if vertnum >= maxvertnum:
			tobj.pprint ("Hem: more uvmap than vertexes? ignoring uv data for vertex %d" % vertnum)
		else:
			my_uv_list[vertnum] = (u, v)
		i += 8 + vnum_size
	#end loop on uv pairs
	uvcoords_dict[name] = my_uv_list
	#this is a per-vertex mapping AND the uv tuple is vertex-ordered, so faces_uv is the same as faces
	if faces == []:
		tobj.pprint ("no faces read yet! delaying uv to face assignments")
		facesuv_dict[name] = []
	else:
		#deepcopy so we could modify it without actually modify faces
		tobj.pprint ("faces already present: proceeding with assignments")
		facesuv_dict[name] = copy.deepcopy(faces)
	return uvcoords_dict, facesuv_dict
# ========================
# === Read uvmapping 2 ===
# ========================
def read_vmad(uvcoords_dict, facesuv_dict, faces, maxvertnum, lwochunk):
	"""Read a VMAD chunk (discontinuous, per-face/per-vertex UVs).

	For each (vertex, polygon, u, v) record a new uv pair is appended to
	the map's uv list and the matching corner(s) of that polygon are
	redirected to the new index. Returns the updated
	(uvcoords_dict, facesuv_dict).
	"""
	maxfacenum = len(faces)
	if maxvertnum == 0 or maxfacenum == 0:
		tobj.pprint ("Found VMAD but no vertexes to map!")
		return uvcoords_dict, facesuv_dict
	data = cStringIO.StringIO(lwochunk.read())
	map_type = data.read(4)
	if map_type != "TXUV":
		tobj.pprint ("Reading VMAD: No Texture UV map Were Found. Map Type: %s" % map_type)
		return uvcoords_dict, facesuv_dict
	dimension, = struct.unpack(">H", data.read(2))
	name, i = read_name(data) #i initialized with string lenght + zeros
	tobj.pprint ("TXUV %d %s" % (dimension, name))
	#continue an existing VMAP/VMAD of the same name, or start fresh
	if uvcoords_dict.has_key(name):
		my_uv_list = uvcoords_dict[name] #update existing
		my_facesuv_list = facesuv_dict[name]
	else:
		my_uv_list = [(0.0, 0.0)] * maxvertnum #start a brand new: this could be made more smart
		my_facesuv_list = copy.deepcopy(faces)
	#end variable initialization
	lastindex = len(my_uv_list) - 1
	while (i < lwochunk.chunksize - 6): #4+2 header bytes already read
		vertnum, vnum_size = read_vx(data)
		i += vnum_size
		polynum, vnum_size = read_vx(data)
		i += vnum_size
		u, v = struct.unpack(">ff", data.read(8))
		if polynum >= maxfacenum or vertnum >= maxvertnum:
			tobj.pprint ("Hem: more uvmap than vertexes? ignorig uv data for vertex %d" % vertnum)
		else:
			my_uv_list.append( (u,v) )
			newindex = len(my_uv_list) - 1
			for vi in range(len(my_facesuv_list[polynum])): #polynum starting from 1 or from 0?
				if my_facesuv_list[polynum][vi] == vertnum:
					my_facesuv_list[polynum][vi] = newindex
			#end loop on current face vertexes
		i += 8
	#end loop on uv pairs
	uvcoords_dict[name] = my_uv_list
	facesuv_dict[name] = my_facesuv_list
	#NOTE(review): newindex is only bound in the else branch above; a VMAD
	#whose every record is out of range would raise NameError on the next line
	tobj.pprint ("updated %d vertexes data" % (newindex-lastindex))
	return uvcoords_dict, facesuv_dict
# =================
# === Read tags ===
# =================
def read_tags(lwochunk):
	"""Read a TAGS chunk: a sequence of NUL-terminated, even-padded strings.
	Returns the list of tag names."""
	data = cStringIO.StringIO(lwochunk.read())
	tag_list = []
	current_tag = ""
	i = 0
	while i < lwochunk.chunksize:
		char = data.read(1)
		if char == "\0":
			tag_list.append(current_tag)
			#even-length tag + NUL makes an odd field: consume the pad byte.
			#NOTE(review): this extra read does not advance i, so i lags the
			#chunk size; the loop still terminates because read() past the
			#end returns "" which is neither "\0" nor real tag data
			if (len(current_tag) % 2 == 0): char = data.read(1)
			current_tag = ""
		else:
			current_tag += char
		i += 1
	tobj.pprint("read %d tags, list follows:" % len(tag_list))
	tobj.pprint( tag_list)
	return tag_list
# ==================
# === Read Ptags ===
# ==================
def read_ptags(lwochunk, tag_list):
	"""Read a PTAG chunk mapping polygons to surface tags.

	Returns {tag_name: [polygon indices]}. Returns {} when the ptag type
	is not SURF or a record references a tag index outside tag_list.
	"""
	data = cStringIO.StringIO(lwochunk.read())
	polygon_type = data.read(4)
	if polygon_type != "SURF":
		tobj.pprint ("No Surf Were Found. Polygon Type: %s" % polygon_type)
		return {}
	ptag_dict = {}
	i = 0
	while(i < lwochunk.chunksize-4): #4 bytes polygon type already read
		if not i%100 and my_meshtools.show_progress:
			Blender.Window.DrawProgressBar(float(i)/lwochunk.chunksize, "Reading PTAGS")
		poln, poln_size = read_vx(data)
		i += poln_size
		tag_index, = struct.unpack(">H", data.read(2))
		#BUGFIX: was "tag_index > len(tag_list)" -- off by one: an index equal
		#to len(tag_list) slipped through and tag_list[tag_index] below raised
		#IndexError instead of being reported and skipped
		if tag_index >= (len(tag_list)):
			tobj.pprint ("Reading PTAG: Surf belonging to undefined TAG: %d. Skipping" % tag_index)
			return {}
		i += 2
		tag_key = tag_list[tag_index]
		if not(ptag_dict.has_key(tag_key)):
			ptag_dict[tag_list[tag_index]] = [poln]
		else:
			ptag_dict[tag_list[tag_index]].append(poln)
	for i in ptag_dict.keys():
		tobj.pprint ("read %d polygons belonging to TAG %s" % (len(ptag_dict[i]), i))
	return ptag_dict
# ==================
# === Read Clips ===
# ==================
def read_clip(lwochunk):
	"""Read a CLIP chunk describing a texture image.

	Returns a dict with 'ID' always set and, depending on the sub-chunks
	found, 'NAME'/'BASENAME' (STIL), 'XREF' and 'NEGA' entries.
	"""
	clip_dict = {}
	data = cStringIO.StringIO(lwochunk.read())
	image_index, = struct.unpack(">L", data.read(4))
	clip_dict['ID'] = image_index
	i = 4
	while(i < lwochunk.chunksize):
		subchunkname, = struct.unpack("4s", data.read(4))
		subchunklen, = struct.unpack(">H", data.read(2))
		if subchunkname == "STIL":
			tobj.pprint("-------- STIL")
			clip_name, k = read_name(data)
			#now split text independently from platform
			#depend on the system where image was saved. NOT the one where the script is run
			no_sep = "\\"
			if Blender.sys.sep == no_sep: no_sep ="/"
			if (no_sep in clip_name):
				clip_name = clip_name.replace(no_sep, Blender.sys.sep)
			short_name = Blender.sys.basename(clip_name)
			if (clip_name == "") or (short_name == ""):
				tobj.pprint ("Reading CLIP: Empty clip name not allowed. Skipping")
				discard = data.read(subchunklen-k)
			#NOTE(review): even on the "Skipping" path above the (empty) names
			#are still stored below -- confirm downstream code tolerates this
			clip_dict['NAME'] = clip_name
			clip_dict['BASENAME'] = short_name
		elif subchunkname == "XREF": #cross reference another image
			tobj.pprint("-------- XREF")
			image_index, = struct.unpack(">L", data.read(4))
			clip_name, k = read_name(data)
			clip_dict['NAME'] = clip_name
			clip_dict['XREF'] = image_index
		elif subchunkname == "NEGA": #negate texture effect
			tobj.pprint("-------- NEGA")
			n, = struct.unpack(">H", data.read(2))
			clip_dict['NEGA'] = n
		else: # Misc Chunks
			tobj.pprint("-------- SURF:%s: skipping" % subchunkname)
			discard = data.read(subchunklen)
		i = i + 6 + subchunklen
	#end loop on surf chunks
	tobj.pprint("read image:%s" % clip_dict)
	return clip_dict
# ===========================
# === Read Surfaces Block ===
# ===========================
def read_surfblok(subchunkdata):
	"""Parse one BLOK sub-chunk of a SURF chunk (an IMAP image-map texture layer).

	subchunkdata: the raw bytes of the BLOK (header already stripped by
	the caller). The layout is: IMAP header sub-chunk, then a TMAP
	attributes sub-chunk, then flat attribute sub-chunks. Returns
	(attribute dict, uv map name); ({}, "") when the block is not an
	IMAP/TMAP image map and is therefore ignored.
	"""
	lenght = len(subchunkdata)
	my_dict = {}
	my_uvname = ""
	data = cStringIO.StringIO(subchunkdata)
	##############################################################
	# blok header sub-chunk
	##############################################################
	subchunkname, = struct.unpack("4s", data.read(4))
	subchunklen, = struct.unpack(">h", data.read(2))
	accumulate_i = subchunklen + 6 #running count of bytes consumed from the BLOK
	if subchunkname != 'IMAP':
		tobj.pprint("---------- SURF: BLOK: %s: block aborting" % subchunkname)
		return {}, ""
	tobj.pprint ("---------- IMAP")
	ordinal, i = read_name(data)
	my_dict['ORD'] = ordinal
	my_dict['g_ORD'] = -1
	my_dict['ENAB'] = True #enabled unless an ENAB sub-chunk says otherwise
	while(i < subchunklen): # ---------left 6------------------------- loop on header parameters
		sub2chunkname, = struct.unpack("4s", data.read(4))
		sub2chunklen, = struct.unpack(">h", data.read(2))
		i = i + 6 + sub2chunklen
		if sub2chunkname == "CHAN":
			tobj.pprint("------------ CHAN")
			sub2chunkname, = struct.unpack("4s", data.read(4))
			my_dict['CHAN'] = sub2chunkname
			sub2chunklen -= 4
		elif sub2chunkname == "ENAB": #only present if is to be disabled
			tobj.pprint("------------ ENAB")
			ena, = struct.unpack(">h", data.read(2))
			my_dict['ENAB'] = ena
			sub2chunklen -= 2
		elif sub2chunkname == "NEGA": #only present if is to be enabled
			tobj.pprint("------------ NEGA")
			ena, = struct.unpack(">h", data.read(2))
			if ena == 1:
				my_dict['NEGA'] = ena
			sub2chunklen -= 2
		elif sub2chunkname == "OPAC": #only present if is to be disabled
			tobj.pprint("------------ OPAC")
			opa, = struct.unpack(">h", data.read(2))
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['OPAC'] = opa
			my_dict['OPACVAL'] = s
			sub2chunklen -= 6
		elif sub2chunkname == "AXIS":
			tobj.pprint("------------ AXIS")
			ena, = struct.unpack(">h", data.read(2))
			my_dict['DISPLAXIS'] = ena
			sub2chunklen -= 2
		else: # Misc Chunks
			tobj.pprint("------------ SURF: BLOK: IMAP: %s: skipping" % sub2chunkname)
			discard = data.read(sub2chunklen)
	#end loop on blok header subchunks
	#NOTE(review): in the recognized branches above any leftover sub2chunklen
	#bytes are not skipped -- parsing relies on each field filling its sub-chunk
	##############################################################
	# blok attributes sub-chunk
	##############################################################
	subchunkname, = struct.unpack("4s", data.read(4))
	subchunklen, = struct.unpack(">h", data.read(2))
	accumulate_i += subchunklen + 6
	if subchunkname != 'TMAP':
		tobj.pprint("---------- SURF: BLOK: %s: block aborting" % subchunkname)
		return {}, ""
	tobj.pprint ("---------- TMAP")
	i = 0
	while(i < subchunklen): # -----------left 6----------------------- loop on header parameters
		sub2chunkname, = struct.unpack("4s", data.read(4))
		sub2chunklen, = struct.unpack(">h", data.read(2))
		i = i + 6 + sub2chunklen
		if sub2chunkname == "CNTR":
			tobj.pprint("------------ CNTR")
			x, y, z = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['CNTR'] = [x, y, z]
			sub2chunklen -= (12+env_size)
		elif sub2chunkname == "SIZE":
			tobj.pprint("------------ SIZE")
			x, y, z = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['SIZE'] = [x, y, z]
			sub2chunklen -= (12+env_size)
		elif sub2chunkname == "ROTA":
			tobj.pprint("------------ ROTA")
			x, y, z = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['ROTA'] = [x, y, z]
			sub2chunklen -= (12+env_size)
		elif sub2chunkname == "CSYS":
			tobj.pprint("------------ CSYS")
			ena, = struct.unpack(">h", data.read(2))
			my_dict['CSYS'] = ena
			sub2chunklen -= 2
		else: # Misc Chunks
			tobj.pprint("------------ SURF: BLOK: TMAP: %s: skipping" % sub2chunkname)
			if sub2chunklen > 0:
				discard = data.read(sub2chunklen)
	#end loop on blok attributes subchunks
	##############################################################
	# ok, now other attributes without sub_chunks
	##############################################################
	while(accumulate_i < lenght): # ---------------------------------- loop on header parameters: lenght has already stripped the 6 bypes header
		subchunkname, = struct.unpack("4s", data.read(4))
		subchunklen, = struct.unpack(">H", data.read(2))
		accumulate_i = accumulate_i + 6 + subchunklen
		if subchunkname == "PROJ":
			tobj.pprint("---------- PROJ")
			p, = struct.unpack(">h", data.read(2))
			my_dict['PROJ'] = p
			subchunklen -= 2
		elif subchunkname == "AXIS":
			tobj.pprint("---------- AXIS")
			a, = struct.unpack(">h", data.read(2))
			my_dict['MAJAXIS'] = a
			subchunklen -= 2
		elif subchunkname == "IMAG":
			tobj.pprint("---------- IMAG")
			i, i_size = read_vx(data)
			my_dict['IMAG'] = i
			subchunklen -= i_size
		elif subchunkname == "WRAP":
			tobj.pprint("---------- WRAP")
			ww, wh = struct.unpack(">hh", data.read(4))
			#reduce width and height to just 1 parameter for both
			my_dict['WRAP'] = max([ww,wh])
			#my_dict['WRAPWIDTH'] = ww
			#my_dict['WRAPHEIGHT'] = wh
			subchunklen -= 4
		elif subchunkname == "WRPW":
			tobj.pprint("---------- WRPW")
			w, = struct.unpack(">f", data.read(4))
			my_dict['WRPW'] = w
			envelope, env_size = read_vx(data)
			subchunklen -= (env_size+4)
		elif subchunkname == "WRPH":
			tobj.pprint("---------- WRPH")
			w, = struct.unpack(">f", data.read(4))
			my_dict['WRPH'] = w
			envelope, env_size = read_vx(data)
			subchunklen -= (env_size+4)
		elif subchunkname == "VMAP":
			tobj.pprint("---------- VMAP")
			vmp, i = read_name(data)
			my_dict['VMAP'] = vmp
			my_uvname = vmp #name links this block to its uv coordinate map
			subchunklen -= i
		else: # Misc Chunks
			tobj.pprint("---------- SURF: BLOK: %s: skipping" % subchunkname)
			if subchunklen > 0:
				discard = data.read(subchunklen)
	#end loop on blok subchunks
	return my_dict, my_uvname
# =====================
# === Read Surfaces ===
# =====================
def read_surfs(lwochunk, surf_list, tag_list):
    """Read a LWO2 SURF chunk and return the surface settings as a dict.

    lwochunk  -- chunk reader exposing read() and chunksize
    surf_list -- surfaces already parsed; used to inherit from a parent surface
    tag_list  -- valid TAG names; surfaces with empty/unknown names are skipped

    Returns a dict keyed by subchunk name ('COLR', 'DIFF', 'BLOK', ...) plus
    'NAME' (and possibly 'UVNAME' set from the BLOK reader), or {} on skip.
    """
    import copy #local import keeps this bugfix self-contained
    my_dict = {}
    data = cStringIO.StringIO(lwochunk.read())
    surf_name, i = read_name(data)
    parent_name, j = read_name(data)
    i += j
    if (surf_name == "") or not(surf_name in tag_list):
        tobj.pprint ("Reading SURF: Actually empty surf name not allowed. Skipping")
        return {}
    if (parent_name != ""):
        #BUGFIX: the original used list.count(), which returns the NUMBER of
        #occurrences (not a position), so parent data was always taken from
        #surf_list[0]; look up the parent by name instead.
        parent_names = [x['NAME'] for x in surf_list]
        if parent_name in parent_names:
            #deep copy so filling in this surface cannot mutate the parent entry
            my_dict = copy.deepcopy(surf_list[parent_names.index(parent_name)])
    my_dict['NAME'] = surf_name
    tobj.pprint ("Surface data for TAG %s" % surf_name)
    while(i < lwochunk.chunksize): #loop on subchunks; each has a 6 byte header
        subchunkname, = struct.unpack("4s", data.read(4))
        subchunklen, = struct.unpack(">H", data.read(2))
        i = i + 6 + subchunklen #6 bytes subchunk header
        if subchunkname == "COLR": #color: mapped on color
            tobj.pprint("-------- COLR")
            r, g, b = struct.unpack(">fff", data.read(12))
            envelope, env_size = read_vx(data)
            my_dict['COLR'] = [r, g, b]
            subchunklen -= (12+env_size)
        elif subchunkname == "DIFF": #diffusion: mapped on reflection (diffuse shader)
            tobj.pprint("-------- DIFF")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['DIFF'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "SPEC": #specularity: mapped to specularity (spec shader)
            tobj.pprint("-------- SPEC")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['SPEC'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "REFL": #reflection: mapped on raymirror
            tobj.pprint("-------- REFL")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['REFL'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "TRNL": #translucency: mapped on same param
            tobj.pprint("-------- TRNL")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['TRNL'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "GLOS": #glossiness: mapped on specularity hardness (spec shader)
            tobj.pprint("-------- GLOS")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['GLOS'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "TRAN": #transparency: inverted and mapped on alpha channel
            tobj.pprint("-------- TRAN")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['TRAN'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "LUMI": #luminosity: mapped on emit channel
            tobj.pprint("-------- LUMI")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['LUMI'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "GVAL": #glow: mapped on add channel
            tobj.pprint("-------- GVAL")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['GVAL'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "SMAN": #smoothing angle
            tobj.pprint("-------- SMAN")
            s, = struct.unpack(">f", data.read(4))
            my_dict['SMAN'] = s
            subchunklen -= 4
        elif subchunkname == "SIDE": #double sided?
            tobj.pprint("-------- SIDE") #if 1 side do not define key
            s, = struct.unpack(">H", data.read(2))
            if s == 3: #3 means double-sided; single-sided leaves the key unset
                my_dict['SIDE'] = s
            subchunklen -= 2
        elif subchunkname == "RIND": #Refraction: mapped on IOR
            tobj.pprint("-------- RIND")
            s, = struct.unpack(">f", data.read(4))
            envelope, env_size = read_vx(data)
            my_dict['RIND'] = s
            subchunklen -= (4+env_size)
        elif subchunkname == "BLOK": #blocks
            tobj.pprint("-------- BLOK")
            rr, uvname = read_surfblok(data.read(subchunklen))
            #paranoia setting: preventing adding an empty dict
            if rr != {}:
                if not(my_dict.has_key('BLOK')):
                    my_dict['BLOK'] = [rr]
                else:
                    my_dict['BLOK'].append(rr)
            if uvname != "":
                my_dict['UVNAME'] = uvname #theoretically there could be a number of them: only one used per surf
            subchunklen = 0 #force ending
        else: # Misc Chunks
            tobj.pprint("-------- SURF:%s: skipping" % subchunkname)
        if subchunklen > 0: #skip any unparsed remainder of the subchunk
            discard = data.read(subchunklen)
    #end loop on surf chunks
    if my_dict.has_key('BLOK'):
        my_dict['BLOK'].reverse() #presumably to obtain bottom-to-top layer order - confirm against LWO2 spec
    return my_dict
# ===========================================================
# === Generation Routines ===================================
# ===========================================================
# ==================================================
# === Compute vector distance between two points ===
# ==================================================
def dist_vector (head, tail):
    """Return the Blender vector pointing from tail to head (head - tail)."""
    dx = head[0] - tail[0]
    dy = head[1] - tail[1]
    dz = head[2] - tail[2]
    return Blender.Mathutils.Vector([dx, dy, dz])
# ================
# === Find Ear ===
# ================
def find_ear(normal, list_dict, verts, face):
    """Find an 'ear' of the polygon suitable for clipping off as a triangle.

    normal    -- face normal used as orientation reference
    list_dict -- working tables: 'MF' meta-vertex indexes, 'D' edge vectors,
                 'X' cross products, 'P' dot products against the normal
    verts     -- full vertex coordinate list
    face      -- vertex indexes of the polygon being reduced

    Returns [c, a, b]: three consecutive positions into list_dict['MF'] whose
    middle vertex a is convex and whose triangle contains no concave vertex.
    Falls back to the last candidate examined if no valid ear is found.
    """
    nv = len(list_dict['MF'])
    #looping through vertexes trying to find an ear
    #most likely in case of panic
    mlc = 0
    mla = 1
    mlb = 2
    for c in range(nv):
        a = (c+1) % nv; b = (a+1) % nv  #three consecutive positions c, a, b
        if list_dict['P'][a] > 0.0: #we have to start from a convex vertex
            #if (list_dict['P'][a] > 0.0) and (list_dict['P'][b] <= 0.0): #we have to start from a convex vertex
            #remember this candidate as the "most likely" fallback
            mlc = c
            mla = a
            mlb = b
            #tobj.pprint ("## mmindex: %s, %s, %s 'P': %s, %s, %s" % (c, a, b, list_dict['P'][c],list_dict['P'][a],list_dict['P'][b]))
            #tobj.pprint ("   ok, this one passed")
            concave = 0
            concave_inside = 0
            for xx in range(nv): #looking for concave vertex
                if (list_dict['P'][xx] <= 0.0) and (xx != b) and (xx != c): #cannot be a: it's convex
                    #ok, found concave vertex
                    concave = 1
                    #a, b, c, xx are all meta-meta vertex indexes
                    mva = list_dict['MF'][a] #meta-vertex-index
                    mvb = list_dict['MF'][b]
                    mvc = list_dict['MF'][c]
                    mvxx = list_dict['MF'][xx]
                    va = face[mva] #vertex
                    vb = face[mvb]
                    vc = face[mvc]
                    vxx = face[mvxx]
                    #Distances (edge vectors of the candidate triangle)
                    d_ac_v = list_dict['D'][c]
                    d_ba_v = list_dict['D'][a]
                    d_cb_v = dist_vector(verts[vc], verts[vb])
                    #distance from triangle points to the concave vertex xx
                    d_xxa_v = dist_vector(verts[vxx], verts[va])
                    d_xxb_v = dist_vector(verts[vxx], verts[vb])
                    d_xxc_v = dist_vector(verts[vxx], verts[vc])
                    #normals of (edge x point-to-xx) pairs
                    n_xxa_v = Blender.Mathutils.CrossVecs(d_ba_v, d_xxa_v)
                    n_xxb_v = Blender.Mathutils.CrossVecs(d_cb_v, d_xxb_v)
                    n_xxc_v = Blender.Mathutils.CrossVecs(d_ac_v, d_xxc_v)
                    #how are oriented the normals?
                    p_xxa_v = Blender.Mathutils.DotVecs(normal, n_xxa_v)
                    p_xxb_v = Blender.Mathutils.DotVecs(normal, n_xxb_v)
                    p_xxc_v = Blender.Mathutils.DotVecs(normal, n_xxc_v)
                    #if normals are all oriented the same way, xx lies inside the triangle
                    if ((p_xxa_v > 0.0) and (p_xxb_v > 0.0) and (p_xxc_v > 0.0)) or ((p_xxa_v <= 0.0) and (p_xxb_v <= 0.0) and (p_xxc_v <= 0.0)):
                        #print "vertex %d: concave inside" % xx
                        concave_inside = 1
                        break  #this candidate is invalid; try the next c
                #endif found a concave vertex
            #end loop looking for concave vertexes
            if (concave == 0) or (concave_inside == 0):
                #no concave vertexes in polygon (should not be): return immediately
                #looped all concave vertexes and no one inside found
                return [c, a, b]
        #no convex vertex, try another one
    #end loop to find a suitable base vertex for ear
    #looped all candidate ears and find no-one suitable
    tobj.pprint ("Reducing face: no valid ear found to reduce!")
    return [mlc, mla, mlb] #uses most likely
# ====================
# === Reduce Faces ===
# ====================
# http://www-cgrl.cs.mcgill.ca/~godfried/teaching/cg-projects/97/Ian/cutting_ears.html per l'import
def reduce_face(verts, face):
    """Decompose an n-sided polygon into triangles (and possibly a final quad)
    using ear clipping.

    verts -- full vertex coordinate list
    face  -- vertex indexes of the polygon

    Returns a list of faces expressed as positions WITHIN face
    (meta-vertex indexes), e.g. [[0, 1, 2], [0, 2, 3], ...].
    """
    nv = len (face)
    if nv == 3: return [[0,1,2]] #trivial decomposition list
    list_dict = {}
    #meta-vertex indexes (Python 2: range() returns a mutable list, popped below)
    list_dict['MF'] = range(nv) # these are meta-vertex-indexes
    list_dict['D'] = [None] * nv  #edge vectors: D[i] = vertex(i+1) - vertex(i)
    list_dict['X'] = [None] * nv  #cross products of adjacent edge vectors
    list_dict['P'] = [None] * nv  #dot of X[i] with the face normal (sign ~ convexity)
    #list of distances
    for mvi in list_dict['MF']:
        #vector between two vertexes
        mvi_hiend = (mvi+1) % nv #last-to-first
        vi_hiend = face[mvi_hiend] #vertex
        vi = face[mvi]
        list_dict['D'][mvi] = dist_vector(verts[vi_hiend], verts[vi])
    #list of cross products - normals evaluated into vertexes
    for vi in range(nv):
        list_dict['X'][vi] = Blender.Mathutils.CrossVecs(list_dict['D'][vi], list_dict['D'][vi-1])
    #reference normal taken from vertex 0 (assumed convex)
    my_face_normal = Blender.Mathutils.Vector([list_dict['X'][0][0], list_dict['X'][0][1], list_dict['X'][0][2]])
    #list of dot products
    list_dict['P'][0] = 1.0
    for vi in range(1, nv):
        list_dict['P'][vi] = Blender.Mathutils.DotVecs(my_face_normal, list_dict['X'][vi])
    #is there at least one concave vertex?
    #one_concave = reduce(lambda x, y: (x) or (y<=0.0), list_dict['P'], 0)
    one_concave = reduce(lambda x, y: (x) + (y<0.0), list_dict['P'], 0)
    decomposition_list = []
    while 1:
        if nv == 3: break  #a triangle remains; appended after the loop
        if one_concave:
            #look for triangle (an ear safe to clip)
            ct = find_ear(my_face_normal, list_dict, verts, face)
            mv0 = list_dict['MF'][ct[0]] #meta-vertex-index
            mv1 = list_dict['MF'][ct[1]]
            mv2 = list_dict['MF'][ct[2]]
            #add the triangle to output list
            decomposition_list.append([mv0, mv1, mv2])
            #update data structures removing remove middle vertex from list
            #distances
            v0 = face[mv0] #vertex
            v1 = face[mv1]
            v2 = face[mv2]
            #new edge spans the gap left by the removed middle vertex
            list_dict['D'][ct[0]] = dist_vector(verts[v2], verts[v0])
            #cross products recomputed only at the two affected vertexes
            list_dict['X'][ct[0]] = Blender.Mathutils.CrossVecs(list_dict['D'][ct[0]], list_dict['D'][ct[0]-1])
            list_dict['X'][ct[2]] = Blender.Mathutils.CrossVecs(list_dict['D'][ct[2]], list_dict['D'][ct[0]])
            #list of dot products
            list_dict['P'][ct[0]] = Blender.Mathutils.DotVecs(my_face_normal, list_dict['X'][ct[0]])
            list_dict['P'][ct[2]] = Blender.Mathutils.DotVecs(my_face_normal, list_dict['X'][ct[2]])
            #physical removal of the clipped middle vertex
            list_dict['MF'].pop(ct[1])
            list_dict['D'].pop(ct[1])
            list_dict['X'].pop(ct[1])
            list_dict['P'].pop(ct[1])
            one_concave = reduce(lambda x, y: (x) or (y<0.0), list_dict['P'], 0)
            nv -=1
        else: #here if no more concave vertexes
            if nv == 4: break #quads only if no concave vertexes
            #fully convex: just fan off a triangle from vertex 0
            decomposition_list.append([list_dict['MF'][0], list_dict['MF'][1], list_dict['MF'][2]])
            #physical removal
            list_dict['MF'].pop(1)
            nv -=1
    #end while there are more my_face to triangulate
    decomposition_list.append(list_dict['MF'])  #remaining triangle or quad
    return decomposition_list
# =========================
# === Recalculate Faces ===
# =========================
# --------- this do the standard face + ptag_dict + uv-map recalc
def recalc_faces(verts, faces, polytag_dict, facesuv_dict):
    """Replace every n-gon (n > 3) with its ear-clipped decomposition, keeping
    the polygon-tag groups and per-face UV lists consistent with the new
    face numbering.

    verts        -- vertex coordinates
    faces        -- list of faces (vertex index lists)
    polytag_dict -- tag name -> list of face indexes
    facesuv_dict -- uv map name -> per-face uv index lists (parallel to faces)

    Returns (new_faces, new_polytag_dict, new_facesuv_dict).
    """
    # init local face list
    my_faces = []
    # init local uvface dict
    my_facesuv = {}
    for uvname in facesuv_dict:
        my_facesuv[uvname] = []
    #maps old face index -> list of new face indexes that replaced it
    replaced_faces_dict = {}
    j = 0
    if len(faces)==0:
        return faces, polytag_dict, facesuv_dict
    for i in range(len(faces)):
        # i = index that spans on original faces
        # j = index that spans on new faces
        if not i%100 and my_meshtools.show_progress: Blender.Window.DrawProgressBar(float(i)/len(faces), "Recalculating faces")
        numfaceverts=len(faces[i])
        if numfaceverts < 4: #This face is a triangle or quad: more strict - it has to be a triangle
            my_faces.append(faces[i]) #ok, leave it alone ....
            for uvname in facesuv_dict:
                my_facesuv[uvname].append(facesuv_dict[uvname][i])
            replaced_faces_dict[i] = [j] #.... but change the nuber order of the face
            j += 1
        else: # Reduce n-sided convex polygon.
            meta_faces = reduce_face(verts, faces[i]) # Indices of triangles.
            this_faces = [] # list of triangles poly replacing original face
            this_faces_index = []
            for mf in meta_faces:
                ll = len(mf)
                #mf holds positions within faces[i]; translate back to vertex indexes
                if ll == 3: #triangle
                    this_faces.append([faces[i][mf[0]], faces[i][mf[1]], faces[i][mf[2]]])
                else: #quads
                    this_faces.append([faces[i][mf[0]], faces[i][mf[1]], faces[i][mf[2]], faces[i][mf[3]]])
                for uvname in facesuv_dict:
                    #same translation applied to the uv index lists
                    if ll == 3: #triangle
                        my_facesuv[uvname].append([facesuv_dict[uvname][i][mf[0]], facesuv_dict[uvname][i][mf[1]], facesuv_dict[uvname][i][mf[2]]])
                    else: #quads
                        my_facesuv[uvname].append([facesuv_dict[uvname][i][mf[0]], facesuv_dict[uvname][i][mf[1]], facesuv_dict[uvname][i][mf[2]], facesuv_dict[uvname][i][mf[3]]])
                this_faces_index.append(j)
                j +=1
            my_faces.extend(this_faces)
            replaced_faces_dict[i] = this_faces_index #face i substituted by this list of faces
        #endif on face vertex number
    #end loop on every face
    #now we have the new faces list and a dictionary replacement.
    #going for polygon tagging
    my_ptag_dict = {}
    for tag in polytag_dict: #for every tag group
        my_ptag_dict[tag] = [] #rebuild a new entry
        for poly in polytag_dict[tag]: #take every element of old face list
            my_ptag_dict[tag].extend(replaced_faces_dict[poly]) #substitutes the element of new face list
    return my_faces, my_ptag_dict, my_facesuv
# ========================================
# === Revert list keeping first vertex ===
# ========================================
def revert (llist):
    """Return a reversed copy of llist, leaving the input untouched.

    Used to flip the winding order of face / uv index lists so the generated
    Blender faces get the expected orientation. The deep copy keeps any nested
    elements independent of the caller's list.
    """
    #deep-copy first so nested elements are not shared with the caller,
    #then reverse via slicing
    return copy.deepcopy(llist)[::-1]
# ====================================
# === Modified Create Blender Mesh ===
# ====================================
def my_create_mesh(complete_vertlist, complete_facelist, current_facelist, objname, not_used_faces):
    """Build a Blender NMesh/Object from a subset of faces.

    complete_vertlist -- all vertices of the LWO layer
    complete_facelist -- all faces of the LWO layer
    current_facelist  -- indexes into complete_facelist to include in this mesh
    objname           -- name for the new mesh/object
    not_used_faces    -- in/out list: entries for used faces are set to -1
                         (pass [] to disable this bookkeeping)

    Returns (created_object, not_used_faces).
    """
    #take the needed faces and update the not-used face list
    vertex_map = [-1] * len(complete_vertlist)  #-1 = unused; later remapped index
    cur_ptag_faces = []
    for ff in current_facelist:
        cur_face = complete_facelist[ff]
        cur_ptag_faces.append(cur_face)
        if not_used_faces != []: not_used_faces[ff] = -1  #mark face as consumed
        for vv in cur_face:
            vertex_map[vv] = 1  #mark vertex as needed
        #end loop on vertex on this face
    #end loop on faces
    mesh = Blender.NMesh.GetRaw()
    #append vertexes (only those referenced by the selected faces)
    jj = 0
    for i in range(len(complete_vertlist)):
        if vertex_map[i] == 1:
            if not i%100 and my_meshtools.show_progress: Blender.Window.DrawProgressBar(float(i)/len(complete_vertlist), "Generating Verts")
            x, y, z = complete_vertlist[i]
            mesh.verts.append(Blender.NMesh.Vert(x, y, z))
            vertex_map[i] = jj  #old global index -> new local index
            jj += 1
    #end sweep over vertexes
    #append faces
    for i in range(len(cur_ptag_faces)):
        if not i%100 and my_meshtools.show_progress: Blender.Window.DrawProgressBar(float(i)/len(cur_ptag_faces), "Generating Faces")
        face = Blender.NMesh.Face()
        #reverse winding so face normals come out correctly
        rev_face = revert(cur_ptag_faces[i])
        for vi in rev_face:
            #for vi in cur_ptag_faces[i]:
            index = vertex_map[vi]
            face.v.append(mesh.verts[index])
        #end sweep over vertexes
        mesh.faces.append(face)
    #end sweep over faces
    if not my_meshtools.overwrite_mesh_name:
        objname = my_meshtools.versioned_name(objname)
    Blender.NMesh.PutRaw(mesh, objname) # Name the Mesh
    obj = Blender.Object.GetSelected()[0]  #PutRaw leaves the new object selected
    obj.name=objname # Name the Object
    Blender.Redraw()
    return obj, not_used_faces #return the created object
# ============================================
# === Set Subsurf attributes on given mesh ===
# ============================================
def set_subsurf(obj):
    """Enable subdivision surfaces (level 2 for display and render) on the
    mesh attached to obj and refresh its display list."""
    mesh_data = obj.getData()
    mesh_data.setSubDivLevels([2, 2])
    mesh_data.mode |= Blender.NMesh.Modes.SUBSURF
    mesh_data.update(1)
    obj.makeDisplayList()
    return
# =================================
# === object size and dimension ===
# =================================
def obj_size_pos(obj):
    """Return (size, center) of obj's bounding box.

    obj -- Blender object exposing getBoundBox(), which yields corner points.

    Returns ((size_x, size_y, size_z), (center_x, center_y, center_z)).

    Improvement: the original transposed the corner list with
    map(lambda *row: ..., *bbox), which both is hard to read and relies on
    Python 2's map() returning an indexable list; zip(*bbox) with
    comprehensions is the idiomatic transpose and behaves identically.
    """
    bbox = obj.getBoundBox()
    #transpose corner points into one coordinate sequence per axis
    axes = [list(coords) for coords in zip(*bbox)]
    bbox_min = [min(coords) for coords in axes]
    bbox_max = [max(coords) for coords in axes]
    obj_size = (bbox_max[0]-bbox_min[0], bbox_max[1]-bbox_min[1], bbox_max[2]-bbox_min[2])
    obj_pos = ( (bbox_max[0]+bbox_min[0]) / 2, (bbox_max[1]+bbox_min[1]) / 2, (bbox_max[2]+bbox_min[2]) / 2)
    return (obj_size, obj_pos)
# =========================
# === Create the object ===
# =========================
def create_objects(objspec_list):
    """Create one Blender mesh object per polygon-tag group, plus one for any
    leftover faces, and parent them all to a new Empty.

    objspec_list layout used here (indexes observed in this function):
      [0] object name, [2] vertices, [3] faces, [5] ptag dict,
      [6] subsurf flag; [1] and [4] are written back with the created
      object dict and the per-object (size, pos) dict.
    """
    nf = len(objspec_list[3])
    #Python 2: range() returns a mutable list; entries are set to -1 when used
    not_used_faces = range(nf)
    ptag_dict = objspec_list[5]
    obj_dict = {}  #links tag names to object, used for material assignments
    obj_dim_dict = {}
    obj_list = []  #have it handy for parent association
    middlechar = "+"
    endchar = ""
    if (objspec_list[6] == 1):  #subpatch object: mark names with '#'
        middlechar = endchar = "#"
    for cur_tag in ptag_dict.keys():
        if ptag_dict[cur_tag] != []:
            #names are truncated to fit Blender's name length limits
            cur_obj, not_used_faces= my_create_mesh(objspec_list[2], objspec_list[3], ptag_dict[cur_tag], objspec_list[0][:9]+middlechar+cur_tag[:9], not_used_faces)
            if objspec_list[6] == 1:
                set_subsurf(cur_obj)
            obj_dict[cur_tag] = cur_obj
            obj_dim_dict[cur_tag] = obj_size_pos(cur_obj)
            obj_list.append(cur_obj)
    #end loop on current group
    #and what if some faces not used in any named PTAG? get rid of unused faces
    for ff in range(nf):
        tt = nf-1-ff #reverse order so pop() does not shift pending indexes
        if not_used_faces[tt] == -1:
            not_used_faces.pop(tt)
    #end sweep on unused face list
    if not_used_faces != []:
        #collect the remaining faces into a catch-all "lone" object
        cur_obj, not_used_faces = my_create_mesh(objspec_list[2], objspec_list[3], not_used_faces, objspec_list[0][:9]+middlechar+"lone", [])
        #my_meshtools.create_mesh(objspec_list[2], not_used_faces, "_unk") #vert, faces, name
        #cur_obj = Blender.Object.GetSelected()[0]
        if objspec_list[6] == 1:
            set_subsurf(cur_obj)
        obj_dict["lone"] = cur_obj
        obj_dim_dict["lone"] = obj_size_pos(cur_obj)
        obj_list.append(cur_obj)
    objspec_list[1] = obj_dict
    objspec_list[4] = obj_dim_dict
    scene = Blender.Scene.getCurrent ()                   # get the current scene
    ob = Blender.Object.New ('Empty', objspec_list[0]+endchar)    # make empty object
    scene.link (ob)                                       # link the object into the scene
    ob.makeParent(obj_list, 1, 0)       # set the root for created objects (no inverse, update scene hyerarchy (slow))
    Blender.Redraw()
    return
# =====================
# === Load an image ===
# =====================
#extensively search for image name
def load_image(dir_part, name):
    """Try hard to load an image file, probing several names and extensions.

    dir_part -- directory to search in; name -- file name from the LWO file.

    Candidates tried in order: the original name, then the same base name with
    each extension in ext_list, then (if the name has uppercase letters) the
    lower-cased variants. The first existing path is loaded; if none exists,
    a last-ditch Load of the final candidate is attempted anyway.

    Returns a Blender Image or None.
    """
    img = None
    nname = Blender.sys.splitext(name)  #(base, extension) pair
    lname = [c.lower() for c in nname]
    ext_list = []
    if lname[1] != nname[1]:
        ext_list.append(lname[1])  #also try the lower-cased original extension
    ext_list.extend(['.tga', '.png', '.jpg', '.gif', '.bmp']) #order from best to worst (personal judgement) bmp last cause of nasty bug
    #first round: original "case"
    current = Blender.sys.join(dir_part, name)
    name_list = [current]
    name_list.extend([Blender.sys.makename(current, ext) for ext in ext_list])
    #second round: lower "case"
    if lname[0] != nname[0]:
        current = Blender.sys.join(dir_part, lname[0])
        name_list.extend([Blender.sys.makename(current, ext) for ext in ext_list])
    for nn in name_list:
        if Blender.sys.exists(nn) == 1:
            break  #nn keeps the first existing candidate (or the last one tried)
    try:
        img = Blender.Image.Load(nn)
        return img
    except IOError:
        return None
# ===========================================
# === Lookup for image index in clip_list ===
# ===========================================
def lookup_imag(clip_list,ima_id):
    """Return the clip entry whose 'ID' equals ima_id, or None if absent.

    Entries carrying an 'XREF' key are cross references: the lookup is
    repeated recursively with the referenced ID until a real clip is found.

    Improvement: dict.has_key() replaced with the `in` operator (the
    supported idiom, and the only one that survives Python 3).
    """
    for clip in clip_list:
        if clip['ID'] == ima_id:
            if 'XREF' in clip:
                #cross reference - recursively look for the referenced image
                return lookup_imag(clip_list, clip['XREF'])
            return clip
    return None
# ===================================================
# === Create and assign image mapping to material ===
# ===================================================
def create_blok(surf, mat, clip_list, dir_part, obj_size, obj_pos):
    """Translate the surface's BLOK texture layers into Blender image textures
    assigned to mat.

    surf      -- surface dict; needs 'BLOK' (list of blok dicts) and 'NAME';
                 may get 'g_IM' written back (image for face-TEX assignment)
    mat       -- Blender material receiving the textures
    clip_list -- CLIP entries from the file, used to resolve image IDs
    dir_part  -- directory of the LWO file, used to locate image files
    obj_size, obj_pos -- bounding-box size/center; only logged to help the
                 user place non-UV projections by hand
    """
    def output_size_ofs(size, pos, blok):
        #just automate repetitive task: log the texture size/offset values the
        #user would have to enter manually in Blender's MapInput panel
        c_map = [0,1,2]
        c_map_txt = [" X--", " -Y-", " --Z"]
        if blok['MAJAXIS'] == 0:
            c_map = [1,2,0]
        if blok['MAJAXIS'] == 2:
            c_map = [0,2,1]
        tobj.pprint ("!!!axis mapping:")
        for mp in c_map: tobj.pprint (c_map_txt[mp])
        s = ["1.0 (Forced)"] * 3
        o = ["0.0 (Forced)"] * 3
        if blok['SIZE'][0] > 0.0: #paranoia controls
            s[0] = "%.5f" % (size[0]/blok['SIZE'][0])
            o[0] = "%.5f" % ((blok['CNTR'][0]-pos[0])/blok['SIZE'][0])
        if blok['SIZE'][1] > 0.0:
            #note the axis swap: LWO Y maps to Blender Z and vice versa
            s[2] = "%.5f" % (size[2]/blok['SIZE'][1])
            o[2] = "%.5f" % ((blok['CNTR'][1]-pos[2])/blok['SIZE'][1])
        if blok['SIZE'][2] > 0.0:
            s[1] = "%.5f" % (size[1]/blok['SIZE'][2])
            o[1] = "%.5f" % ((blok['CNTR'][2]-pos[1])/blok['SIZE'][2])
        tobj.pprint ("!!!texture size and offsets:")
        tobj.pprint (" sizeX = %s; sizeY = %s; sizeZ = %s" % (s[c_map[0]], s[c_map[1]], s[c_map[2]]))
        tobj.pprint (" ofsX = %s; ofsY = %s; ofsZ = %s" % (o[c_map[0]], o[c_map[1]], o[c_map[2]]))
        return
    ti = 0  #Blender texture channel index
    for blok in surf['BLOK']:
        tobj.pprint ("#...................................................................#")
        tobj.pprint ("# Processing texture block no.%s for surf %s" % (ti,surf['NAME']))
        tobj.pprint ("#...................................................................#")
        tobj.pdict (blok)
        #NOTE(review): comment says 8 channels but the check permits ti up to 10 - confirm intended limit
        if ti > 9: break #only 8 channels 0..7 allowed for texture mapping
        #NOTE(review): the break statements below abort ALL remaining bloks,
        #not just the current one - 'continue' may have been intended
        if not blok['ENAB']:
            tobj.pprint ( "***Image is not ENABled! Quitting this block")
            break
        if not(blok.has_key('IMAG')):
            tobj.pprint ( "***No IMAGe for this block? Quitting")
            break #extract out the image index within the clip_list
        tobj.pprint ("looking for image number %d" % blok['IMAG'])
        ima = lookup_imag(clip_list, blok['IMAG'])
        if ima == None:
            tobj.pprint ( "***Block index image not within CLIP list? Quitting Block")
            break #safety check (paranoia setting)
        #look for images: absolute path first, then several fallback locations
        img = load_image("",ima['NAME'])
        if img == None:
            tobj.pprint ( "***No image %s found: trying LWO file subdir" % ima['NAME'])
            img = load_image(dir_part,ima['BASENAME'])
        if img == None:
            tobj.pprint ( "***No image %s found in directory %s: trying Images subdir" % (ima['BASENAME'], dir_part))
            img = load_image(dir_part+Blender.sys.sep+"Images",ima['BASENAME'])
        if img == None:
            tobj.pprint ( "***No image %s found: trying alternate Images subdir" % ima['BASENAME'])
            img = load_image(dir_part+Blender.sys.sep+".."+Blender.sys.sep+"Images",ima['BASENAME'])
        if img == None:
            tobj.pprint ( "***No image %s found: giving up" % ima['BASENAME'])
            break
        #lucky we are: we have an image
        tname = str(ima['ID'])
        if blok.has_key('CHAN'):
            tname = tname + "+" + blok['CHAN']
        newtex = Blender.Texture.New(tname)
        newtex.setType('Image')                 # make it an image texture
        newtex.image = img
        #how does it extends beyond borders
        if blok.has_key('WRAP'):
            if (blok['WRAP'] == 3) or (blok['WRAP'] == 2):
                newtex.setExtend('Extend')
            elif (blok['WRAP'] == 1):
                newtex.setExtend('Repeat')
            elif (blok['WRAP'] == 0):
                newtex.setExtend('Clip')
        tobj.pprint ("generated texture %s" % tname)
        #LWO blend-mode index -> human-readable name, only used for logging
        blendmode_list = ['Mix',
                          'Subtractive',
                          'Difference',
                          'Multiply',
                          'Divide',
                          'Mix (CalcAlpha already set; try setting Stencil!',
                          'Texture Displacement',
                          'Additive']
        set_blendmode = 7 #default additive
        if blok.has_key('OPAC'):
            set_blendmode = blok['OPAC']
        if set_blendmode == 5: #transparency
            newtex.imageFlags |= Blender.Texture.ImageFlags.CALCALPHA
        tobj.pprint ("!!!Set Texture -> MapTo -> Blending Mode = %s" % blendmode_list[set_blendmode])
        set_dvar = 1.0
        if blok.has_key('OPACVAL'):
            set_dvar = blok['OPACVAL']
        #MapTo is determined by CHAN parameter
        mapflag = Blender.Texture.MapTo.COL #default to color
        if blok.has_key('CHAN'):
            if blok['CHAN'] == 'COLR':
                tobj.pprint ("!!!Set Texture -> MapTo -> Col = %.3f" % set_dvar)
                if set_blendmode == 0:
                    surf['g_IM'] = img #do not set anything, just save image object for later assignment
            if blok['CHAN'] == 'BUMP':
                mapflag = Blender.Texture.MapTo.NOR
                tobj.pprint ("!!!Set Texture -> MapTo -> Nor = %.3f" % set_dvar)
            if blok['CHAN'] == 'LUMI':
                mapflag = Blender.Texture.MapTo.EMIT
                tobj.pprint ("!!!Set Texture -> MapTo -> DVar = %.3f" % set_dvar)
            if blok['CHAN'] == 'DIFF':
                mapflag = Blender.Texture.MapTo.REF
                tobj.pprint ("!!!Set Texture -> MapTo -> DVar = %.3f" % set_dvar)
            if blok['CHAN'] == 'SPEC':
                mapflag = Blender.Texture.MapTo.SPEC
                tobj.pprint ("!!!Set Texture -> MapTo -> DVar = %.3f" % set_dvar)
            if blok['CHAN'] == 'TRAN':
                mapflag = Blender.Texture.MapTo.ALPHA
                tobj.pprint ("!!!Set Texture -> MapTo -> DVar = %.3f" % set_dvar)
        if blok.has_key('NEGA'):
            tobj.pprint ("!!!Watch-out: effect of this texture channel must be INVERTED!")
        #the TexCo flag is determined by PROJ parameter
        #NOTE(review): if 'PROJ' is absent, coordflag stays unbound and the
        #setTexture call below would raise NameError - confirm PROJ is mandatory
        if blok.has_key('PROJ'):
            if blok['PROJ'] == 0: #0 - Planar
                tobj.pprint ("!!!Flat projection")
                coordflag = Blender.Texture.TexCo.ORCO
                output_size_ofs(obj_size, obj_pos, blok)
            elif blok['PROJ'] == 1: #1 - Cylindrical
                tobj.pprint ("!!!Cylindrical projection")
                coordflag = Blender.Texture.TexCo.ORCO
                output_size_ofs(obj_size, obj_pos, blok)
            elif blok['PROJ'] == 2: #2 - Spherical
                tobj.pprint ("!!!Spherical projection")
                coordflag = Blender.Texture.TexCo.ORCO
                output_size_ofs(obj_size, obj_pos, blok)
            elif blok['PROJ'] == 3: #3 - Cubic
                tobj.pprint ("!!!Cubic projection")
                coordflag = Blender.Texture.TexCo.ORCO
                output_size_ofs(obj_size, obj_pos, blok)
            elif blok['PROJ'] == 4: #4 - Front Projection
                tobj.pprint ("!!!Front projection")
                coordflag = Blender.Texture.TexCo.ORCO
                output_size_ofs(obj_size, obj_pos, blok)
            elif blok['PROJ'] == 5: #5 - UV
                tobj.pprint ("UVMapped")
                coordflag = Blender.Texture.TexCo.UV
        mat.setTexture(ti, newtex, coordflag, mapflag)
        ti += 1
    #end loop over bloks
    return
# ========================================
# === Create and assign a new material ===
# ========================================
#def create_material(surf_list, ptag_dict, obj, clip_list, uv_dict, dir_part):
def create_material(clip_list, surf_list, objspec, dir_part):
    """Create one Blender material per surface and assign it (plus textures,
    smoothing, transparency and UVs) to the faces of the matching object.

    clip_list -- CLIP entries for image lookup
    surf_list -- surface dicts produced by read_surfs()
    objspec   -- object spec list; uses [1] obj dict, [4] size/pos dict,
                 [5] ptag dict, [7] uv coords dict, [8] per-face uv dict
    dir_part  -- directory of the LWO file (image search)
    """
    if (surf_list == []) or (objspec[5] == {}) or (objspec[1] == {}):
        #nothing to do: dump the inputs for diagnosis and bail out
        tobj.pprint( surf_list)
        tobj.pprint( objspec[5])
        tobj.pprint( objspec[1])
        tobj.pprint( "something getting wrong in create_material ...")
        return
    obj_dict = objspec[1]
    obj_dim_dict = objspec[4]
    ptag_dict = objspec[5]
    uvcoords_dict = objspec[7]
    facesuv_dict = objspec[8]
    for surf in surf_list:
        if (surf['NAME'] in ptag_dict.keys()):
            tobj.pprint ("#-------------------------------------------------------------------#")
            tobj.pprint ("Processing surface (material): %s" % surf['NAME'])
            tobj.pprint ("#-------------------------------------------------------------------#")
            #material set up
            facelist = ptag_dict[surf['NAME']]
            #bounding box and position
            cur_obj = obj_dict[surf['NAME']]
            obj_size = obj_dim_dict[surf['NAME']][0]
            obj_pos = obj_dim_dict[surf['NAME']][1]
            tobj.pprint(surf)
            #map LWO surface channels onto Blender material settings
            mat = Blender.Material.New(surf['NAME'])
            if surf.has_key('COLR'):
                mat.rgbCol = surf['COLR']
            if surf.has_key('LUMI'):
                mat.setEmit(surf['LUMI'])
            if surf.has_key('GVAL'):
                mat.setAdd(surf['GVAL'])
            if surf.has_key('SPEC'):
                mat.setSpec(surf['SPEC'])                        #it should be * 2 but seems to be a bit higher lwo [0.0, 1.0] - blender [0.0, 2.0]
            if surf.has_key('DIFF'):
                mat.setRef(surf['DIFF'])                         #lwo [0.0, 1.0] - blender [0.0, 1.0]
            if surf.has_key('REFL'):
                mat.setRayMirr(surf['REFL'])                     #lwo [0.0, 1.0] - blender [0.0, 1.0]
                #mat.setMode('RAYMIRROR')
                mat.mode |= Blender.Material.Modes.RAYMIRROR
            #WARNING translucency not implemented yet check 2.36 API
            #if surf.has_key('TRNL'):
            #
            if surf.has_key('GLOS'):                             #lwo [0.0, 1.0] - blender [0, 255]
                glo = int(371.67 * surf['GLOS'] - 42.334)        #linear mapping - seems to work better than exp mapping
                if glo <32:  glo = 32                            #clamped to 32-255
                if glo >255: glo = 255
                mat.setHardness(glo)
            if surf.has_key('TRAN'):
                mat.setAlpha(1.0-surf['TRAN'])                   #lwo [0.0, 1.0] - blender [1.0, 0.0]
                mat.mode |= Blender.Material.Modes.RAYTRANSP
            if surf.has_key('RIND'):
                s = surf['RIND']
                if s < 1.0: s = 1.0
                if s > 3.0: s = 3.0
                mat.setIOR(s)                                    #clipped to blender [1.0, 3.0]
                mat.mode |= Blender.Material.Modes.RAYTRANSP
            if surf.has_key('BLOK') and surf['BLOK'] != []:
                #update the material according to texture.
                create_blok(surf, mat, clip_list, dir_part, obj_size, obj_pos)
            #finished setting up the material
            #associate material to mesh
            msh = cur_obj.getData()
            mat_index = len(msh.getMaterials(1))  #new material goes at the end
            msh.addMaterial(mat)
            msh.mode |= Blender.NMesh.Modes.AUTOSMOOTH #smooth it anyway
            msh.update(1)
            for f in range(len(msh.faces)):
                msh.faces[f].materialIndex = mat_index
                msh.faces[f].smooth = 1 #smooth it anyway
                msh.faces[f].mode |= Blender.NMesh.FaceModes.TWOSIDE #set it anyway
                msh.faces[f].transp = Blender.NMesh.FaceTranspModes['SOLID']
                msh.faces[f].flag = Blender.NMesh.FaceTranspModes['SOLID']
                #NOTE(review): the SMAN block below is loop-invariant; it is
                #re-executed for every face with the same effect
                if surf.has_key('SMAN'):
                    #not allowed mixed mode mesh (all the mesh is smoothed and all with the same angle)
                    #only one smoothing angle will be active! => take the max one
                    s = int(surf['SMAN']/3.1415926535897932384626433832795*180.0)      #lwo in radians - blender in degrees
                    if msh.getMaxSmoothAngle() < s: msh.setMaxSmoothAngle(s)
                #if surf.has_key('SIDE'):
                #    msh.faces[f].mode |= Blender.NMesh.FaceModes.TWOSIDE #set it anyway
                if surf.has_key('TRAN') and mat.getAlpha()<1.0:
                    msh.faces[f].transp = Blender.NMesh.FaceTranspModes['ALPHA']
                if surf.has_key('UVNAME') and facesuv_dict.has_key(surf['UVNAME']):
                    #assign uv-data
                    msh.hasFaceUV(1)
                    #WARNING every block could have its own uvmap set of coordinate. take only the first one
                    facesuv_list = facesuv_dict[surf['UVNAME']]
                    #print "facesuv_list: ",f , facelist[f]
                    #facelist[f] maps this mesh face back to the global face index
                    rev_face = revert(facesuv_list[facelist[f]])
                    for vi in rev_face:
                        msh.faces[f].uv.append(uvcoords_dict[surf['UVNAME']][vi])
                if surf.has_key('g_IM'):
                    #texture image saved by create_blok: enable face TEX mode
                    msh.faces[f].mode |= Blender.NMesh.FaceModes['TEX']
                    msh.faces[f].image = surf['g_IM']
            #end loop over faces
            msh.update(1)
            mat_index += 1
        #end if exist faces ib this object belonging to surf
    #end loop on surfaces
    return
# ======================
# === Read Faces 6.0 ===
# ======================
def read_faces_6(lwochunk):
    """Read a LWO2 POLS chunk: the polygon list of a layer.

    lwochunk -- chunk reader exposing read() and chunksize.

    Returns (faces, subsurf) where faces is a list of vertex-index lists and
    subsurf is 1 for subpatch ('PTCH') polygons, 0 for plain 'FACE' ones.
    On an unsupported polygon type returns ("", 2) instead.
    """
    data = cStringIO.StringIO(lwochunk.read())
    faces = []
    polygon_type = data.read(4)  #4-char type tag (Python 2 str comparison below)
    subsurf = 0
    if polygon_type != "FACE" and polygon_type != "PTCH":
        tobj.pprint("No FACE/PATCH Were Found. Polygon Type: %s" % polygon_type)
        return "", 2
    if polygon_type == 'PTCH': subsurf = 1
    i = 0  #bytes consumed after the 4-byte type tag
    while(i < lwochunk.chunksize-4):
        if not i%100 and my_meshtools.show_progress:
            Blender.Window.DrawProgressBar(float(i)/lwochunk.chunksize, "Reading Faces")
        facev = []
        numfaceverts, = struct.unpack(">H", data.read(2))
        i += 2
        for j in range(numfaceverts):
            #vertex indexes are variable-length (VX) encoded
            index, index_size = read_vx(data)
            i += index_size
            facev.append(index)
        faces.append(facev)
    tobj.pprint("read %s faces; type of block %d (0=FACE; 1=PATCH)" % (len(faces), subsurf))
    return faces, subsurf
# ===========================================================
# === Start the show and main callback ======================
# ===========================================================
def fs_callback(filename):
    #file-selector callback: kicks off the whole LWO import for the chosen file
    read(filename)

#script entry point: open Blender's file selector and hand the choice to fs_callback
Blender.Window.FileSelector(fs_callback, "Import LWO")
|