|
5 | 5 |
|
6 | 6 | from hako_binary import binary_io
|
7 | 7 | from hako_binary import offset_parser
|
8 |
| -from hako_binary import offset_map |
| 8 | + |
class DynamicAllocator:
    """Append-only byte buffer that remembers where each chunk landed.

    Chunks are appended to an internal bytearray; an optional key lets the
    caller look a chunk's offset up again later.
    """

    def __init__(self):
        self.data = bytearray()
        self.offset_map = {}

    def add(self, bytes_data, expected_offset=None, key=None):
        """Append bytes_data and return the offset it was written at.

        If expected_offset lies beyond the current end of the buffer, the
        gap is zero-filled first so the chunk starts at that offset.  A
        truthy key records the resulting offset in offset_map.
        """
        if expected_offset is not None and len(self.data) < expected_offset:
            # Zero-pad up to the requested position.
            self.data += bytes(expected_offset - len(self.data))
        start = len(self.data)
        self.data += bytes_data
        if key:
            self.offset_map[key] = start
        return start

    def to_array(self):
        """Return the underlying (mutable) bytearray."""
        return self.data

    def size(self):
        """Current length of the buffer in bytes."""
        return len(self.data)

    def get_offset(self, key):
        """Offset recorded for key, or None if it was never recorded."""
        return self.offset_map.get(key)
| 37 | + |
class BinaryWriterContainer:
    """Per-write state: the shared heap allocator plus the PDU metadata."""

    def __init__(self):
        # Metadata starts out empty; binary_write fills in the sizes and
        # offsets once serialization is finished.
        meta = binary_io.PduMetaData()
        meta.set_empty()
        self.meta = meta
        self.heap_allocator = DynamicAllocator()
9 | 43 |
|
def binary_write(offmap, binary_data, json_data, typename):
    """Serialize json_data (a PDU of type typename) into binary_data.

    Layout of the result: [metadata | base data | heap data].  binary_data
    is resized in place to exactly the total size before the three regions
    are copied in.
    """
    base_allocator = DynamicAllocator()
    bw_container = BinaryWriterContainer()
    binary_write_recursive(bw_container, offmap, base_allocator, json_data, typename)

    # Fill in the metadata now that both regions have their final sizes.
    meta_size = binary_io.PduMetaData.PDU_META_DATA_SIZE
    base_size = base_allocator.size()
    total_size = meta_size + base_size + bw_container.heap_allocator.size()
    bw_container.meta.total_size = total_size
    bw_container.meta.heap_off = meta_size + base_size

    # Grow or shrink binary_data in place so it is exactly total_size bytes.
    current = len(binary_data)
    if current < total_size:
        binary_data += bytes(total_size - current)
    elif current > total_size:
        del binary_data[total_size:]

    # Copy metadata, then the base region, then the heap region.
    # NOTE(review): meta.base_off is read below but never assigned in this
    # function — presumably set_empty() initializes it to the metadata
    # size; confirm against PduMetaData.
    binary_io.writeBinary(binary_data, 0, bw_container.meta.to_bytes())
    binary_io.writeBinary(binary_data, bw_container.meta.base_off, base_allocator.to_array())
    binary_io.writeBinary(binary_data, bw_container.meta.heap_off, bw_container.heap_allocator.to_array())
| 68 | + |
def get_binary(type, bin, elm_size):
    """Return the encoded bytes for one member value.

    Strings are copied into a zero-filled buffer of elm_size bytes so the
    fixed-size slot is fully initialized; every other type passes through
    unchanged.
    """
    if type != "string":
        return bin
    slot = bytearray(elm_size)
    # NOTE(review): if len(bin) > elm_size this slice assignment grows the
    # buffer past elm_size instead of truncating — confirm that oversized
    # strings cannot reach here.
    slot[:len(bin)] = bin
    return slot
| 76 | + |
def binary_write_recursive(bw_container: BinaryWriterContainer, offmap, allocator, json_data, typename):
    """Recursively serialize json_data members of PDU type typename.

    Fixed-size members go into `allocator` at their declared offsets;
    variable-length ("varray") members go into the shared heap allocator
    held by bw_container.  Members present in json_data but absent from
    the offset map are silently skipped.

    Fixes vs. previous revision: locals no longer shadow the builtins
    `type`/`bin`; unused enumerate indices removed from the struct-array
    loops; loop-invariant member_size() call hoisted out of the primitive
    varray loop.
    """
    lines = offmap.get(typename)
    for key in json_data:
        line = offset_parser.select_by_name(lines, key)
        if line is None:
            # Unknown member for this type: ignore rather than fail.
            continue
        member_type = offset_parser.member_type(line)
        off = offset_parser.member_off(line)
        if offset_parser.is_primitive(line):
            if offset_parser.is_single(line):
                bin_data = binary_io.typeTobin(member_type, json_data[key])
                bin_data = get_binary(member_type, bin_data, offset_parser.member_size(line))
                allocator.add(bin_data, expected_offset=off)
            elif offset_parser.is_array(line):
                # Fixed-size array: each element occupies elm_size/array_size bytes.
                elm_size = offset_parser.member_size(line)
                array_size = offset_parser.array_size(line)
                one_elm_size = int(elm_size / array_size)
                for i, elm in enumerate(json_data[key]):
                    bin_data = binary_io.typeTobin(member_type, elm)
                    bin_data = get_binary(member_type, bin_data, one_elm_size)
                    allocator.add(bin_data, expected_offset=(off + i * one_elm_size))
            else:
                # Variable-length array of primitives: elements go on the heap.
                elm_size = offset_parser.member_size(line)  # loop-invariant, hoisted
                for i, elm in enumerate(json_data[key]):
                    bin_data = binary_io.typeTobin(member_type, elm)
                    bin_data = get_binary(member_type, bin_data, elm_size)
                    bw_container.heap_allocator.add(bin_data, expected_offset=(off + i * elm_size))
        else:
            if offset_parser.is_single(line):
                binary_write_recursive(bw_container, offmap, allocator, json_data[key], member_type)
            elif offset_parser.is_array(line):
                # Fixed-size array of structs: each element recurses into the
                # same allocator; element offsets come from the nested type map.
                for elm in json_data[key]:
                    binary_write_recursive(bw_container, offmap, allocator, elm, member_type)
            else:
                # Variable-length array of structs: elements recurse into the heap.
                for elm in json_data[key]:
                    binary_write_recursive(bw_container, offmap, bw_container.heap_allocator, elm, member_type)
0 commit comments