Details | Last modification | View Log | RSS feed
| Rev | Author | Line No. | Line | 
|---|---|---|---|
| 14 | pmbaty | 1 | #!/usr/bin/env python3 | 
| 2 | from __future__ import absolute_import, division, print_function | ||
| 3 | |||
| 4 | from ctypes import ArgumentError | ||
| 5 | import json | ||
| 6 | import optparse | ||
| 7 | import os | ||
| 8 | import struct | ||
| 9 | import sys | ||
| 10 | |||
| 11 | ### | ||
| 12 | |||
# Magic number identifying a headermap file: the four bytes read back as
# 'pamh' when the file was written by a little-endian machine and as
# 'hmap' when written by a big-endian one.
k_header_magic_LE = b'pamh'
k_header_magic_BE = b'hmap'
| 15 | |||
def hmap_hash(str):
    """Return the "well-known" headermap hash of *str*.

    Each character is lowercased and the hash is the sum of
    ``ord(c) * 13`` over all characters.
    """

    total = 0
    for ch in str:
        total += ord(ch.lower()) * 13
    return total
| 24 | |||
class HeaderMap(object):
    """An in-memory representation of a Clang headermap (.hmap) file.

    A headermap is an on-disk hash table mapping header names (e.g.
    "Foo/Bar.h") to the path of the actual file.  Each bucket stores three
    string-table offsets: the key, and a (prefix, suffix) pair whose
    concatenation is the mapped path.
    """

    @staticmethod
    def frompath(path):
        """Parse the headermap file at *path* and return a HeaderMap.

        Raises SystemExit with a diagnostic on any malformed or truncated
        input -- this is a command-line tool, not a library.
        """
        with open(path, 'rb') as f:
            # The byte order of the magic determines the endianness of all
            # subsequent header/bucket fields.
            magic = f.read(4)
            if magic == b'pamh':    # k_header_magic_LE: little-endian file
                endian_code = '<'
            elif magic == b'hmap':  # k_header_magic_BE: big-endian file
                endian_code = '>'
            else:
                raise SystemExit("error: %s: not a headermap" % (
                    path,))

            # Read the header information.
            header_fmt = endian_code + 'HHIIII'
            header_size = struct.calcsize(header_fmt)
            data = f.read(header_size)
            if len(data) != header_size:
                raise SystemExit("error: %s: truncated headermap header" % (
                    path,))

            # Fields: version, reserved, string table offset, number of
            # live entries, number of buckets, max value length (unused).
            (version, reserved, strtable_offset, num_entries,
             num_buckets, _) = struct.unpack(header_fmt, data)

            if version != 1:
                raise SystemExit("error: %s: unknown headermap version: %r" % (
                    path, version))
            if reserved != 0:
                raise SystemExit("error: %s: invalid reserved value in header" % (
                    path,))

            # The number of buckets must be a power of two.
            if num_buckets == 0 or (num_buckets & num_buckets - 1) != 0:
                raise SystemExit("error: %s: invalid number of buckets" % (
                    path,))

            # Read all of the buckets.
            bucket_fmt = endian_code + 'III'
            bucket_size = struct.calcsize(bucket_fmt)
            buckets_data = f.read(num_buckets * bucket_size)
            if len(buckets_data) != num_buckets * bucket_size:
                raise SystemExit("error: %s: truncated headermap buckets" % (
                    path,))
            buckets = [struct.unpack(bucket_fmt,
                                     buckets_data[i*bucket_size:(i+1)*bucket_size])
                       for i in range(num_buckets)]

            # Read the string table; the format doesn't explicitly
            # communicate the size of the string table (which is dumb), so
            # assume it is the rest of the file.
            f.seek(0, 2)
            strtable_size = f.tell() - strtable_offset
            f.seek(strtable_offset)

            # BUGFIX: also reject a *negative* size (a string table offset
            # pointing past the end of the file), not just a zero one;
            # previously that case fell through to a misleading
            # "unable to read complete string table" error.
            if strtable_size <= 0:
                raise SystemExit("error: %s: unable to read zero-sized string table"%(
                    path,))
            strtable = f.read(strtable_size)

            if len(strtable) != strtable_size:
                raise SystemExit("error: %s: unable to read complete string table"%(
                    path,))
            # The table must end with a NUL so every string is terminated.
            if strtable[-1] != 0:
                raise SystemExit("error: %s: invalid string table in headermap" % (
                    path,))

        return HeaderMap(num_entries, buckets, strtable)

    def __init__(self, num_entries, buckets, strtable):
        # Number of live mappings (may be less than len(buckets)).
        self.num_entries = num_entries
        # List of (key_idx, prefix_idx, suffix_idx) offset triples; a
        # key_idx of 0 marks an empty bucket.
        self.buckets = buckets
        # Raw NUL-terminated string table (bytes).
        self.strtable = strtable

    def get_string(self, idx):
        """Return the NUL-terminated string starting at offset *idx*."""
        if idx >= len(self.strtable):
            raise SystemExit("error: %s: invalid string index" % (
                idx,))
        end_idx = self.strtable.index(0, idx)
        return self.strtable[idx:end_idx].decode()

    @property
    def mappings(self):
        """Yield (key, mapped path) pairs for every occupied bucket."""
        for key_idx, prefix_idx, suffix_idx in self.buckets:
            if key_idx == 0:
                continue
            yield (self.get_string(key_idx),
                   self.get_string(prefix_idx) + self.get_string(suffix_idx))
| 112 | |||
| 113 | ### | ||
| 114 | |||
def action_dump(name, args):
    "dump a headermap file"

    parser = optparse.OptionParser("%%prog %s [options] <headermap path>" % (
        name,))
    parser.add_option("-v", "--verbose", dest="verbose",
                      help="show more verbose output [%default]",
                      action="store_true", default=False)
    opts, args = parser.parse_args(args)

    if len(args) != 1:
        parser.error("invalid number of arguments")

    (path,) = args

    hmap = HeaderMap.frompath(path)

    print ('Header Map: %s' % (path,))
    if not opts.verbose:
        # Terse mode: just the key -> value pairs, sorted by key.
        for key, value in sorted(hmap.mappings):
            print ("%s -> %s" % (key, value))
    else:
        # Verbose mode: file statistics plus a raw dump of every occupied
        # bucket, including the slot the key's hash actually selects.
        print ('headermap: %r' % (path,))
        print ('  num entries: %d' % (hmap.num_entries,))
        print ('  num buckets: %d' % (len(hmap.buckets),))
        print ('  string table size: %d' % (len(hmap.strtable),))
        mask = len(hmap.buckets) - 1
        for slot, (key_idx, prefix_idx, suffix_idx) in enumerate(hmap.buckets):
            if key_idx == 0:
                continue

            # Resolve the three string-table references for display.
            key = hmap.get_string(key_idx)
            prefix = hmap.get_string(prefix_idx)
            suffix = hmap.get_string(suffix_idx)

            print ("  bucket[%d]: %r -> (%r, %r) -- %d" % (
                slot, key, prefix, suffix, (hmap_hash(key) & mask)))
    print ()
| 157 | |||
def next_power_of_two(value):
    """Return the smallest power of two that is >= *value* (minimum 1).

    Raises ArgumentError if *value* is negative.
    """
    if value < 0:
        # BUGFIX: supply a message so the failure is diagnosable; the
        # exception type is kept for backward compatibility with callers.
        raise ArgumentError("value must be non-negative: %r" % (value,))
    return 1 if value == 0 else 2**(value - 1).bit_length()
| 162 | |||
def action_write(name, args):
    "write a headermap file from a JSON definition"

    parser = optparse.OptionParser("%%prog %s [options] <input path> <output path>" % (
        name,))
    opts, args = parser.parse_args(args)

    if len(args) != 2:
        parser.error("invalid number of arguments")

    input_path, output_path = args

    with open(input_path, "r") as f:
        input_data = json.load(f)

    # Compute the headermap contents, we make a table that is 1/3 full.
    mappings = input_data['mappings']
    num_buckets = next_power_of_two(len(mappings) * 3)

    # Empty buckets are all-zero triples; offset 0 is the reserved leading
    # NUL of the string table, so a key offset of 0 means "unused".
    table = [(0, 0, 0)] * num_buckets
    max_value_len = 0
    strtable = "\0"
    for key, value in mappings.items():
        # Normalize bytes keys/values (a Python 2 holdover) to str.
        if not isinstance(key, str):
            key = key.decode('utf-8')
        if not isinstance(value, str):
            value = value.decode('utf-8')
        max_value_len = max(max_value_len, len(value))

        # Append the key, the "<dir>/" prefix, and the basename suffix to
        # the string table, remembering each one's offset.
        key_idx = len(strtable)
        strtable += key + '\0'
        prefix_idx = len(strtable)
        strtable += os.path.dirname(value) + '/' + '\0'
        suffix_idx = len(strtable)
        strtable += os.path.basename(value) + '\0'

        # Linear-probe for a free bucket starting at the key's hash slot.
        start = hmap_hash(key)
        for probe in range(num_buckets):
            slot = (start + probe) % num_buckets
            if table[slot][0] == 0:
                table[slot] = (key_idx, prefix_idx, suffix_idx)
                break
        else:
            # Cannot happen: the table always has more buckets than keys.
            raise RuntimeError

    # We always emit a little-endian file.
    endian_code = '<'
    magic = k_header_magic_LE
    magic_size = 4
    header_fmt = endian_code + 'HHIIII'
    header_size = struct.calcsize(header_fmt)
    bucket_fmt = endian_code + 'III'
    bucket_size = struct.calcsize(bucket_fmt)
    strtable_offset = magic_size + header_size + num_buckets * bucket_size
    header = (1, 0, strtable_offset, len(mappings),
              num_buckets, max_value_len)

    # Write out the headermap.
    with open(output_path, 'wb') as f:
        f.write(magic)
        f.write(struct.pack(header_fmt, *header))
        f.write(b''.join(struct.pack(bucket_fmt, *bucket) for bucket in table))
        f.write(strtable.encode())
| 229 | |||
def action_tovfs(name, args):
    "convert a headermap to a VFS layout"

    parser = optparse.OptionParser("%%prog %s [options] <headermap path>" % (
        name,))
    parser.add_option("", "--build-path", dest="build_path",
                      help="build path prefix",
                      action="store", type=str)
    opts, args = parser.parse_args(args)

    if len(args) != 2:
        parser.error("invalid number of arguments")
    if opts.build_path is None:
        parser.error("--build-path is required")

    input_path, output_path = args

    hmap = HeaderMap.frompath(input_path)

    # We assume we are mapping framework paths, so a key of "Foo/Bar.h"
    # maps to "<build path>/Foo.framework/Headers/Bar.h".
    build_dir_contents = []
    for key, value in hmap.mappings:
        # If this isn't a framework style mapping, ignore it.
        parts = key.split('/')
        if len(parts) != 2:
            continue
        framework_name, header_name = parts
        build_dir_contents.append({
            'name' : '%s.framework/Headers/%s' % (framework_name,
                                                  header_name),
            'type' : 'file',
            'external-contents' : value })

    # Assemble the single-root VFS overlay description.
    vfs = {
        'version' : 0,
        'roots' : [{
            'name' : opts.build_path,
            'type' : 'directory',
            'contents' : build_dir_contents }] }

    with open(output_path, 'w') as f:
        json.dump(vfs, f, indent=2)
| 274 | |||
# Build the command-name -> handler table from the action_* functions
# defined above ("action_foo_bar" becomes command "foo-bar").  Note the
# outermost iterable of a comprehension is evaluated in the enclosing
# scope, so locals() here is the module namespace.
commands = {name[7:].replace("_", "-"): handler
            for name, handler in locals().items()
            if name.startswith('action_')}
| 278 | |||
def usage():
    """Print the tool synopsis and command list to stderr, then exit(1)."""
    print ("Usage: %s command [options]" % (
        os.path.basename(sys.argv[0])), file=sys.stderr)
    print (file=sys.stderr)
    print ("Available commands:", file=sys.stderr)
    # Pad command names so the descriptions line up in a column.
    width = max(len(cmd) for cmd in commands)
    for cmd, handler in sorted(commands.items()):
        print ("  %-*s - %s" % (width, cmd, handler.__doc__), file=sys.stderr)
    sys.exit(1)
| 288 | |||
def main():
    """Dispatch to the command named by argv[1], or print usage and exit."""
    if len(sys.argv) < 2:
        usage()
    cmd = sys.argv[1]
    if cmd not in commands:
        usage()
    commands[cmd](cmd, sys.argv[2:])
| 295 | |||
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()