Skip to content

Commit b17f849

Browse files
committed
updated for latest
1 parent 354c383 commit b17f849

File tree

4 files changed

+109
-24
lines changed

4 files changed

+109
-24
lines changed

bindings/python/hako_binary/binary_io.py

Lines changed: 54 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -40,9 +40,20 @@ def binTofloat32(binary):
4040
def binTofloat64(binary):
    """Unpack *binary* (8 bytes, native byte order, C double) as a float."""
    (value,) = struct.unpack('d', binary)
    return value
4242

43-
def binTostring(binary):
44-
return binary.decode().replace('\0', '')
45-
#return "Not Supported.."
43+
def binTostring(binary, max_len=128):
    """Decode a NUL-terminated UTF-8 string from *binary*.

    Only the first *max_len* bytes are considered; everything from the
    first NUL byte onward is discarded.  On a decode failure the raw
    payload is dumped to stdout for debugging and the original
    UnicodeDecodeError is re-raised.
    """
    window = binary[:max_len]
    terminator = window.find(b'\0')
    if terminator == -1:
        terminator = max_len
    try:
        return window[:terminator].decode('utf-8')
    except UnicodeDecodeError as e:
        print("UnicodeDecodeError:")
        print(f" ERROR: {e}")
        print(f" RAW DATA: {binary}")
        print(f" HEXDUMP: {' '.join(f'{b:02x}' for b in binary)}")
        raise
56+
4657

4758

4859
def int8Tobin(arg):
@@ -163,6 +174,46 @@ def binToArrayValues(type, arg):
163174
else:
164175
return None
165176

177+
def typeTobin_array(type, values, elm_size=None):
    """Serialize *values* into a packed little-endian array of *type*.

    Numeric types (and "bool", stored as a 32-bit int) are packed with
    struct.  For "string", each element is UTF-8 encoded, NUL-terminated,
    then padded (or truncated) to exactly *elm_size* bytes.

    Raises ValueError when *type* is unknown, or when *elm_size* is
    omitted for a string array.
    """
    # struct format codes, all little-endian.
    codes = {
        "int8": "b", "uint8": "B",
        "int16": "h", "uint16": "H",
        "int32": "i", "uint32": "I",
        "bool": "i",
        "int64": "q", "uint64": "Q",
        "float32": "f", "float64": "d",
    }
    code = codes.get(type)
    if code is not None:
        return struct.pack(f'<{len(values)}{code}', *values)
    if type == "string":
        if elm_size is None:
            raise ValueError("elm_size required for string array")
        chunks = []
        for text in values:
            cell = bytearray(elm_size)
            # NUL-terminate, then clip to the fixed cell width; a cell
            # that is completely full loses its terminator, as before.
            encoded = (text.encode('utf-8') + b'\x00')[:elm_size]
            cell[:len(encoded)] = encoded
            chunks.append(cell)
        return b''.join(chunks)
    raise ValueError(f"typeTobin_array: Unsupported type {type}")
215+
216+
166217
def writeBinary(binary_data, off, bin):
167218
i = 0
168219
for data in bin:

bindings/python/hako_binary/binary_reader.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@
44
import sys
55
import base64
66

7-
from hako_binary import binary_io
8-
from hako_binary import offset_parser
7+
from . import binary_io
8+
from . import offset_parser
99

1010
def decode_base64(data):
1111
return base64.b64decode(data)
@@ -31,9 +31,11 @@ def binary_read_recursive(meta: binary_io.PduMetaData, offmap, binary_data, json
3131
type = offset_parser.member_type(line)
3232
name = offset_parser.member_name(line)
3333
size = offset_parser.member_size(line)
34+
#print(f"binary_read_recursive: {name} off={off} size={size} type={type}")
3435
if (offset_parser.is_primitive(line)):
3536
if (offset_parser.is_single(line)):
3637
bin = binary_io.readBinary(binary_data, off, size)
38+
#print(f"binary_read_recursive: {name} off={off} size={size} type={type} bin={bin}")
3739
value = binary_io.binTovalue(type, bin)
3840
json_data[name] = value
3941
elif (offset_parser.is_array(line)):
@@ -44,6 +46,7 @@ def binary_read_recursive(meta: binary_io.PduMetaData, offmap, binary_data, json
4446
array_size = binary_io.binTovalue("int32", binary_io.readBinary(binary_data, off, 4))
4547
offset_from_heap = binary_io.binTovalue("int32", binary_io.readBinary(binary_data, off + 4, 4))
4648
one_elm_size = size
49+
#print(f"binary_read_recursive: {name} off={off} size={size} type={type} array_size={array_size} offset_from_heap={offset_from_heap}")
4750
array_value = binary_io.readBinary(binary_data, meta.heap_off + offset_from_heap, one_elm_size * array_size)
4851
json_data[name + '__raw' ] = array_value
4952
json_data[name] = binary_io.binToArrayValues(type, array_value)

bindings/python/hako_binary/binary_writer.py

Lines changed: 33 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
import json
44
import sys
55

6-
from hako_binary import binary_io
7-
from hako_binary import offset_parser
6+
from . import binary_io
7+
from . import offset_parser
88

99
class DynamicAllocator:
1010
def __init__(self, is_heap: bool):
@@ -13,16 +13,14 @@ def __init__(self, is_heap: bool):
1313
self.is_heap = is_heap
1414

1515
def add(self, bytes_data, expected_offset=None, key=None):
    """Append *bytes_data* to this allocator's buffer.

    If *expected_offset* is given and the buffer is currently shorter,
    zero-pad up to it first so the data lands at the expected position.
    When *key* is supplied, the start offset of the data is recorded in
    self.offset_map under that key.
    """
    current_size = len(self.data)
    # Guard against the default expected_offset=None: `None < int`
    # raises TypeError in Python 3, so only pad when an offset was
    # actually requested.
    if expected_offset is not None and current_size < expected_offset:
        self.data.extend(bytearray(expected_offset - current_size))
    offset = len(self.data)
    self.data.extend(bytes_data)
    if key:
        self.offset_map[key] = offset
2826

@@ -84,6 +82,7 @@ def binary_write_recursive(parent_off: int, bw_container: BinaryWriterContainer,
8482
continue
8583
type = offset_parser.member_type(line)
8684
off = offset_parser.member_off(line)
85+
#print(f"key: {key} type: {type} off: {off} line: {line}")
8786
if offset_parser.is_primitive(line):
8887
if offset_parser.is_single(line):
8988
bin = binary_io.typeTobin(type, json_data[key])
@@ -94,15 +93,25 @@ def binary_write_recursive(parent_off: int, bw_container: BinaryWriterContainer,
9493
elm_size = offset_parser.member_size(line)
9594
array_size = offset_parser.array_size(line)
9695
one_elm_size = int(elm_size / array_size)
97-
for i, elm in enumerate(json_data[key]):
98-
bin = binary_io.typeTobin(type, elm)
99-
bin = get_binary(type, bin, one_elm_size)
100-
allocator.add(bin, expected_offset=(parent_off + off + i * one_elm_size))
96+
#for i, elm in enumerate(json_data[key]):
97+
# bin = binary_io.typeTobin(type, elm)
98+
# bin = get_binary(type, bin, one_elm_size)
99+
# allocator.add(bin, expected_offset=(parent_off + off + i * one_elm_size))
100+
binary = binary_io.typeTobin_array(type, json_data[key], one_elm_size)
101+
allocator.add(binary, expected_offset=(parent_off + off))
101102
else: # varray
102-
for i, elm in enumerate(json_data[key]):
103-
bin = binary_io.typeTobin(type, elm)
104-
bin = get_binary(type, bin, offset_parser.member_size(line))
105-
bw_container.heap_allocator.add(bin, expected_offset=(off + i * offset_parser.member_size(line)))
103+
offset_from_heap = bw_container.heap_allocator.size()
104+
array_size = len(json_data[key])
105+
#print(f"varray: {key} array_size: {array_size} offset_from_heap: {offset_from_heap}")
106+
#for i, elm in enumerate(json_data[key]):
107+
# bin = binary_io.typeTobin(type, elm)
108+
# bin = get_binary(type, bin, offset_parser.member_size(line))
109+
# bw_container.heap_allocator.add(bin, expected_offset=0)
110+
binary = binary_io.typeTobin_array(type, json_data[key], offset_parser.member_size(line))
111+
bw_container.heap_allocator.add(binary, expected_offset=0)
112+
a_b = array_size.to_bytes(4, byteorder='little')
113+
o_b = offset_from_heap.to_bytes(4, byteorder='little')
114+
allocator.add(a_b + o_b, expected_offset=parent_off + off)
106115
else:
107116
if offset_parser.is_single(line):
108117
binary_write_recursive(parent_off + off, bw_container, offmap, allocator, json_data[key], type)
@@ -113,5 +122,11 @@ def binary_write_recursive(parent_off: int, bw_container: BinaryWriterContainer,
113122
one_elm_size = int(elm_size / array_size)
114123
binary_write_recursive((parent_off + off + i * one_elm_size), bw_container, offmap, allocator, elm, type)
115124
else: # varray
125+
offset_from_heap = bw_container.heap_allocator.size()
126+
array_size = len(json_data[key])
116127
for i, elm in enumerate(json_data[key]):
117128
binary_write_recursive(0, bw_container, offmap, bw_container.heap_allocator, elm, type)
129+
a_b = array_size.to_bytes(4, byteorder='little')
130+
o_b = offset_from_heap.to_bytes(4, byteorder='little')
131+
allocator.add(a_b + o_b, expected_offset=parent_off + off)
132+

bindings/python/hako_binary/offset_map.py

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import glob
44
import json
55
import sys
6-
from hako_binary import offset_parser
6+
from . import offset_parser
77

88
class OffsetMap:
99
def __init__(self, offset_path):
@@ -17,6 +17,22 @@ def get(self, typename):
1717
self.map[typename] = lines
1818
return self.map[typename]
1919

20+
def align8(self, value):
    """Round *value* up to the next multiple of 8."""
    return (value + 7) & ~7
22+
23+
def get_pdu_size(self, typename):
    """Return the 8-byte-aligned total size of the PDU named *typename*.

    Computed from the last member's offset plus its size, plus an extra
    8 bytes (presumably trailing meta/heap bookkeeping -- TODO confirm),
    rounded up to a multiple of 8.
    """
    last_line = self.get(typename)[-1]
    if offset_parser.is_varray(last_line):
        # A varray member occupies a fixed 8-byte (len, off) pair.
        last_size = 8
    else:
        last_size = offset_parser.member_size(last_line)
    end = offset_parser.member_off(last_line) + last_size
    return self.align8(end + 8)
35+
2036
def find_filepath(self, path, filename):
2137
f_array = filename.split('/')
2238
if (len(f_array) > 1):

0 commit comments

Comments
 (0)