1#!/usr/bin/env python
2# kate: replace-tabs on; indent-width 4;
3
4from __future__ import unicode_literals
5
6'''Generate header file for nanopb from a ProtoBuf FileDescriptorSet.'''
7nanopb_version = "nanopb-0.4.0-dev"
8
9import sys
10import re
11import codecs
12import copy
13from functools import reduce
14
15try:
16    # Add some dummy imports to keep packaging tools happy.
17    import google, distutils.util # bbfreeze seems to need these
18    import pkg_resources # pyinstaller / protobuf 2.5 seem to need these
19except:
20    # Don't care, we will error out later if it is actually important.
21    pass
22
23try:
24    import google.protobuf.text_format as text_format
25    import google.protobuf.descriptor_pb2 as descriptor
26    import google.protobuf.compiler.plugin_pb2 as plugin_pb2
27    import google.protobuf.reflection as reflection
28    import google.protobuf.descriptor
29except:
30    sys.stderr.write('''
31         *************************************************************
32         *** Could not import the Google protobuf Python libraries ***
33         *** Try installing package 'python-protobuf' or similar.  ***
34         *************************************************************
35    ''' + '\n')
36    raise
37
38try:
39    import proto.nanopb_pb2 as nanopb_pb2
40except TypeError:
41    sys.stderr.write('''
42         ****************************************************************************
43         *** Got TypeError when importing the protocol definitions for generator. ***
44         *** This usually means that the protoc in your path doesn't match the    ***
45         *** Python protobuf library version.                                     ***
46         ***                                                                      ***
47         *** Please check the output of the following commands:                   ***
48         *** which protoc                                                         ***
49         *** protoc --version                                                     ***
50         *** python -c 'import google.protobuf; print(google.protobuf.__file__)'  ***
51         *** If you are not able to find the python protobuf version using the    ***
52         *** above command, use this command.                                     ***
53         *** pip freeze | grep -i protobuf                                        ***
54         ****************************************************************************
55    ''' + '\n')
56    raise
57except:
58    sys.stderr.write('''
59         ********************************************************************
60         *** Failed to import the protocol definitions for generator.     ***
61         *** You have to run 'make' in the nanopb/generator/proto folder. ***
62         ********************************************************************
63    ''' + '\n')
64    raise
65
66# ---------------------------------------------------------------------------
67#                     Generation of single fields
68# ---------------------------------------------------------------------------
69
70import time
71import os.path
72
73# Values are tuple (c type, pb type, encoded size, data_size)
74FieldD = descriptor.FieldDescriptorProto
75datatypes = {
76    FieldD.TYPE_BOOL:       ('bool',     'BOOL',        1,  4),
77    FieldD.TYPE_DOUBLE:     ('double',   'DOUBLE',      8,  8),
78    FieldD.TYPE_FIXED32:    ('uint32_t', 'FIXED32',     4,  4),
79    FieldD.TYPE_FIXED64:    ('uint64_t', 'FIXED64',     8,  8),
80    FieldD.TYPE_FLOAT:      ('float',    'FLOAT',       4,  4),
81    FieldD.TYPE_INT32:      ('int32_t',  'INT32',      10,  4),
82    FieldD.TYPE_INT64:      ('int64_t',  'INT64',      10,  8),
83    FieldD.TYPE_SFIXED32:   ('int32_t',  'SFIXED32',    4,  4),
84    FieldD.TYPE_SFIXED64:   ('int64_t',  'SFIXED64',    8,  8),
85    FieldD.TYPE_SINT32:     ('int32_t',  'SINT32',      5,  4),
86    FieldD.TYPE_SINT64:     ('int64_t',  'SINT64',     10,  8),
87    FieldD.TYPE_UINT32:     ('uint32_t', 'UINT32',      5,  4),
88    FieldD.TYPE_UINT64:     ('uint64_t', 'UINT64',     10,  8),
89
90    # Integer size override options
91    (FieldD.TYPE_INT32,   nanopb_pb2.IS_8):   ('int8_t',   'INT32', 10,  1),
92    (FieldD.TYPE_INT32,  nanopb_pb2.IS_16):   ('int16_t',  'INT32', 10,  2),
93    (FieldD.TYPE_INT32,  nanopb_pb2.IS_32):   ('int32_t',  'INT32', 10,  4),
94    (FieldD.TYPE_INT32,  nanopb_pb2.IS_64):   ('int64_t',  'INT32', 10,  8),
95    (FieldD.TYPE_SINT32,  nanopb_pb2.IS_8):   ('int8_t',  'SINT32',  2,  1),
96    (FieldD.TYPE_SINT32, nanopb_pb2.IS_16):   ('int16_t', 'SINT32',  3,  2),
97    (FieldD.TYPE_SINT32, nanopb_pb2.IS_32):   ('int32_t', 'SINT32',  5,  4),
98    (FieldD.TYPE_SINT32, nanopb_pb2.IS_64):   ('int64_t', 'SINT32', 10,  8),
99    (FieldD.TYPE_UINT32,  nanopb_pb2.IS_8):   ('uint8_t', 'UINT32',  2,  1),
100    (FieldD.TYPE_UINT32, nanopb_pb2.IS_16):   ('uint16_t','UINT32',  3,  2),
101    (FieldD.TYPE_UINT32, nanopb_pb2.IS_32):   ('uint32_t','UINT32',  5,  4),
102    (FieldD.TYPE_UINT32, nanopb_pb2.IS_64):   ('uint64_t','UINT32', 10,  8),
103    (FieldD.TYPE_INT64,   nanopb_pb2.IS_8):   ('int8_t',   'INT64', 10,  1),
104    (FieldD.TYPE_INT64,  nanopb_pb2.IS_16):   ('int16_t',  'INT64', 10,  2),
105    (FieldD.TYPE_INT64,  nanopb_pb2.IS_32):   ('int32_t',  'INT64', 10,  4),
106    (FieldD.TYPE_INT64,  nanopb_pb2.IS_64):   ('int64_t',  'INT64', 10,  8),
107    (FieldD.TYPE_SINT64,  nanopb_pb2.IS_8):   ('int8_t',  'SINT64',  2,  1),
108    (FieldD.TYPE_SINT64, nanopb_pb2.IS_16):   ('int16_t', 'SINT64',  3,  2),
109    (FieldD.TYPE_SINT64, nanopb_pb2.IS_32):   ('int32_t', 'SINT64',  5,  4),
110    (FieldD.TYPE_SINT64, nanopb_pb2.IS_64):   ('int64_t', 'SINT64', 10,  8),
111    (FieldD.TYPE_UINT64,  nanopb_pb2.IS_8):   ('uint8_t', 'UINT64',  2,  1),
112    (FieldD.TYPE_UINT64, nanopb_pb2.IS_16):   ('uint16_t','UINT64',  3,  2),
113    (FieldD.TYPE_UINT64, nanopb_pb2.IS_32):   ('uint32_t','UINT64',  5,  4),
114    (FieldD.TYPE_UINT64, nanopb_pb2.IS_64):   ('uint64_t','UINT64', 10,  8),
115}
116
117# String types (for python 2 / python 3 compatibility)
118try:
119    strtypes = (unicode, str)
120except NameError:
121    strtypes = (str, )
122
123
124class Names:
125    '''Keeps a set of nested names and formats them to C identifier.'''
126    def __init__(self, parts = ()):
127        if isinstance(parts, Names):
128            parts = parts.parts
129        elif isinstance(parts, strtypes):
130            parts = (parts,)
131        self.parts = tuple(parts)
132
133    def __str__(self):
134        return '_'.join(self.parts)
135
136    def __add__(self, other):
137        if isinstance(other, strtypes):
138            return Names(self.parts + (other,))
139        elif isinstance(other, Names):
140            return Names(self.parts + other.parts)
141        elif isinstance(other, tuple):
142            return Names(self.parts + other)
143        else:
144            raise ValueError("Name parts should be of type str")
145
146    def __eq__(self, other):
147        return isinstance(other, Names) and self.parts == other.parts
148
149def names_from_type_name(type_name):
150    '''Parse Names() from FieldDescriptorProto type_name'''
151    if type_name[0] != '.':
152        raise NotImplementedError("Lookup of non-absolute type names is not supported")
153    return Names(type_name[1:].split('.'))
154
155def varint_max_size(max_value):
156    '''Returns the maximum number of bytes a varint can take when encoded.'''
157    if max_value < 0:
158        max_value = 2**64 - max_value
159    for i in range(1, 11):
160        if (max_value >> (i * 7)) == 0:
161            return i
162    raise ValueError("Value too large for varint: " + str(max_value))
163
164assert varint_max_size(-1) == 10
165assert varint_max_size(0) == 1
166assert varint_max_size(127) == 1
167assert varint_max_size(128) == 2
168
169class EncodedSize:
170    '''Class used to represent the encoded size of a field or a message.
171    Consists of a combination of symbolic sizes and integer sizes.'''
172    def __init__(self, value = 0, symbols = []):
173        if isinstance(value, EncodedSize):
174            self.value = value.value
175            self.symbols = value.symbols
176        elif isinstance(value, strtypes + (Names,)):
177            self.symbols = [str(value)]
178            self.value = 0
179        else:
180            self.value = value
181            self.symbols = symbols
182
183    def __add__(self, other):
184        if isinstance(other, int):
185            return EncodedSize(self.value + other, self.symbols)
186        elif isinstance(other, strtypes + (Names,)):
187            return EncodedSize(self.value, self.symbols + [str(other)])
188        elif isinstance(other, EncodedSize):
189            return EncodedSize(self.value + other.value, self.symbols + other.symbols)
190        else:
191            raise ValueError("Cannot add size: " + repr(other))
192
193    def __mul__(self, other):
194        if isinstance(other, int):
195            return EncodedSize(self.value * other, [str(other) + '*' + s for s in self.symbols])
196        else:
197            raise ValueError("Cannot multiply size: " + repr(other))
198
199    def __str__(self):
200        if not self.symbols:
201            return str(self.value)
202        else:
203            return '(' + str(self.value) + ' + ' + ' + '.join(self.symbols) + ')'
204
205    def upperlimit(self):
206        if not self.symbols:
207            return self.value
208        else:
209            return 2**32 - 1
210
211class Enum:
212    def __init__(self, names, desc, enum_options):
213        '''desc is EnumDescriptorProto'''
214
215        self.options = enum_options
216        self.names = names
217
218        # by definition, `names` include this enum's name
219        base_name = Names(names.parts[:-1])
220
221        if enum_options.long_names:
222            self.values = [(names + x.name, x.number) for x in desc.value]
223        else:
224            self.values = [(base_name + x.name, x.number) for x in desc.value]
225
226        self.value_longnames = [self.names + x.name for x in desc.value]
227        self.packed = enum_options.packed_enum
228
229    def has_negative(self):
230        for n, v in self.values:
231            if v < 0:
232                return True
233        return False
234
235    def encoded_size(self):
236        return max([varint_max_size(v) for n,v in self.values])
237
238    def __str__(self):
239        result = 'typedef enum _%s {\n' % self.names
240        result += ',\n'.join(["    %s = %d" % x for x in self.values])
241        result += '\n}'
242
243        if self.packed:
244            result += ' pb_packed'
245
246        result += ' %s;' % self.names
247
248        # sort the enum by value
249        sorted_values = sorted(self.values, key = lambda x: (x[1], x[0]))
250
251        result += '\n#define _%s_MIN %s' % (self.names, sorted_values[0][0])
252        result += '\n#define _%s_MAX %s' % (self.names, sorted_values[-1][0])
253        result += '\n#define _%s_ARRAYSIZE ((%s)(%s+1))' % (self.names, self.names, sorted_values[-1][0])
254
255        if not self.options.long_names:
256            # Define the long names always so that enum value references
257            # from other files work properly.
258            for i, x in enumerate(self.values):
259                result += '\n#define %s %s' % (self.value_longnames[i], x[0])
260
261        if self.options.enum_to_string:
262            result += '\nconst char *%s_name(%s v);\n' % (self.names, self.names)
263
264        return result
265
266    def enum_to_string_definition(self):
267        if not self.options.enum_to_string:
268            return ""
269
270        result = 'const char *%s_name(%s v) {\n' % (self.names, self.names)
271        result += '    switch (v) {\n'
272
273        for ((enumname, _), strname) in zip(self.values, self.value_longnames):
274            # Strip off the leading type name from the string value.
275            strval = str(strname)[len(str(self.names)) + 1:]
276            result += '        case %s: return "%s";\n' % (enumname, strval)
277
278        result += '    }\n'
279        result += '    return "unknown";\n'
280        result += '}\n'
281
282        return result
283
284class FieldMaxSize:
285    def __init__(self, worst = 0, checks = [], field_name = 'undefined'):
286        if isinstance(worst, list):
287            self.worst = max(i for i in worst if i is not None)
288        else:
289            self.worst = worst
290
291        self.worst_field = field_name
292        self.checks = list(checks)
293
294    def extend(self, extend, field_name = None):
295        self.worst = max(self.worst, extend.worst)
296
297        if self.worst == extend.worst:
298            self.worst_field = extend.worst_field
299
300        self.checks.extend(extend.checks)
301
302class Field:
303    def __init__(self, struct_name, desc, field_options):
304        '''desc is FieldDescriptorProto'''
305        self.tag = desc.number
306        self.struct_name = struct_name
307        self.union_name = None
308        self.name = desc.name
309        self.default = None
310        self.max_size = None
311        self.max_count = None
312        self.array_decl = ""
313        self.enc_size = None
314        self.data_item_size = None
315        self.ctype = None
316        self.fixed_count = False
317        self.callback_datatype = field_options.callback_datatype
318
319        if field_options.type == nanopb_pb2.FT_INLINE:
320            # Before nanopb-0.3.8, fixed length bytes arrays were specified
321            # by setting type to FT_INLINE. But to handle pointer typed fields,
322            # it makes sense to have it as a separate option.
323            field_options.type = nanopb_pb2.FT_STATIC
324            field_options.fixed_length = True
325
326        # Parse field options
327        if field_options.HasField("max_size"):
328            self.max_size = field_options.max_size
329
330        if desc.type == FieldD.TYPE_STRING and field_options.HasField("max_length"):
331            # max_length overrides max_size for strings
332            self.max_size = field_options.max_length + 1
333
334        if field_options.HasField("max_count"):
335            self.max_count = field_options.max_count
336
337        if desc.HasField('default_value'):
338            self.default = desc.default_value
339
340        # Check field rules, i.e. required/optional/repeated.
341        can_be_static = True
342        if desc.label == FieldD.LABEL_REPEATED:
343            self.rules = 'REPEATED'
344            if self.max_count is None:
345                can_be_static = False
346            else:
347                self.array_decl = '[%d]' % self.max_count
348                if field_options.fixed_count:
349                  self.rules = 'FIXARRAY'
350
351        elif field_options.proto3:
352            self.rules = 'SINGULAR'
353        elif desc.label == FieldD.LABEL_REQUIRED:
354            self.rules = 'REQUIRED'
355        elif desc.label == FieldD.LABEL_OPTIONAL:
356            self.rules = 'OPTIONAL'
357        else:
358            raise NotImplementedError(desc.label)
359
360        # Check if the field can be implemented with static allocation
361        # i.e. whether the data size is known.
362        if desc.type == FieldD.TYPE_STRING and self.max_size is None:
363            can_be_static = False
364
365        if desc.type == FieldD.TYPE_BYTES and self.max_size is None:
366            can_be_static = False
367
368        # Decide how the field data will be allocated
369        if field_options.type == nanopb_pb2.FT_DEFAULT:
370            if can_be_static:
371                field_options.type = nanopb_pb2.FT_STATIC
372            else:
373                field_options.type = nanopb_pb2.FT_CALLBACK
374
375        if field_options.type == nanopb_pb2.FT_STATIC and not can_be_static:
376            raise Exception("Field '%s' is defined as static, but max_size or "
377                            "max_count is not given." % self.name)
378
379        if field_options.fixed_count and self.max_count is None:
380            raise Exception("Field '%s' is defined as fixed count, "
381                            "but max_count is not given." % self.name)
382
383        if field_options.type == nanopb_pb2.FT_STATIC:
384            self.allocation = 'STATIC'
385        elif field_options.type == nanopb_pb2.FT_POINTER:
386            self.allocation = 'POINTER'
387        elif field_options.type == nanopb_pb2.FT_CALLBACK:
388            self.allocation = 'CALLBACK'
389        else:
390            raise NotImplementedError(field_options.type)
391
392        # Decide the C data type to use in the struct.
393        if desc.type in datatypes:
394            self.ctype, self.pbtype, self.enc_size, self.data_item_size = datatypes[desc.type]
395
396            # Override the field size if user wants to use smaller integers
397            if (desc.type, field_options.int_size) in datatypes:
398                self.ctype, self.pbtype, self.enc_size, self.data_item_size = datatypes[(desc.type, field_options.int_size)]
399        elif desc.type == FieldD.TYPE_ENUM:
400            self.pbtype = 'ENUM'
401            self.data_item_size = 4
402            self.ctype = names_from_type_name(desc.type_name)
403            if self.default is not None:
404                self.default = self.ctype + self.default
405            self.enc_size = None # Needs to be filled in when enum values are known
406        elif desc.type == FieldD.TYPE_STRING:
407            self.pbtype = 'STRING'
408            self.ctype = 'char'
409            if self.allocation == 'STATIC':
410                self.ctype = 'char'
411                self.array_decl += '[%d]' % self.max_size
412                # -1 because of null terminator. Both pb_encode and pb_decode
413                # check the presence of it.
414                self.enc_size = varint_max_size(self.max_size) + self.max_size - 1
415        elif desc.type == FieldD.TYPE_BYTES:
416            if field_options.fixed_length:
417                self.pbtype = 'FIXED_LENGTH_BYTES'
418
419                if self.max_size is None:
420                    raise Exception("Field '%s' is defined as fixed length, "
421                                    "but max_size is not given." % self.name)
422
423                self.enc_size = varint_max_size(self.max_size) + self.max_size
424                self.ctype = 'pb_byte_t'
425                self.array_decl += '[%d]' % self.max_size
426            else:
427                self.pbtype = 'BYTES'
428                self.ctype = 'pb_bytes_array_t'
429                if self.allocation == 'STATIC':
430                    self.ctype = self.struct_name + self.name + 't'
431                    self.enc_size = varint_max_size(self.max_size) + self.max_size
432        elif desc.type == FieldD.TYPE_MESSAGE:
433            self.pbtype = 'MESSAGE'
434            self.ctype = self.submsgname = names_from_type_name(desc.type_name)
435            self.enc_size = None # Needs to be filled in after the message type is available
436        else:
437            raise NotImplementedError(desc.type)
438
439    def __lt__(self, other):
440        return self.tag < other.tag
441
442    def __str__(self):
443        result = ''
444        if self.allocation == 'POINTER':
445            if self.rules == 'REPEATED':
446                result += '    pb_size_t ' + self.name + '_count;\n'
447
448            if self.pbtype == 'MESSAGE':
449                # Use struct definition, so recursive submessages are possible
450                result += '    struct _%s *%s;' % (self.ctype, self.name)
451            elif self.pbtype == 'FIXED_LENGTH_BYTES':
452                # Pointer to fixed size array
453                result += '    %s (*%s)%s;' % (self.ctype, self.name, self.array_decl)
454            elif self.rules in ['REPEATED', 'FIXARRAY'] and self.pbtype in ['STRING', 'BYTES']:
455                # String/bytes arrays need to be defined as pointers to pointers
456                result += '    %s **%s;' % (self.ctype, self.name)
457            else:
458                result += '    %s *%s;' % (self.ctype, self.name)
459        elif self.allocation == 'CALLBACK':
460            result += '    %s %s;' % (self.callback_datatype, self.name)
461        else:
462            if self.rules == 'OPTIONAL':
463                result += '    bool has_' + self.name + ';\n'
464            elif self.rules == 'REPEATED':
465                result += '    pb_size_t ' + self.name + '_count;\n'
466            result += '    %s %s%s;' % (self.ctype, self.name, self.array_decl)
467        return result
468
469    def types(self):
470        '''Return definitions for any special types this field might need.'''
471        if self.pbtype == 'BYTES' and self.allocation == 'STATIC':
472            result = 'typedef PB_BYTES_ARRAY_T(%d) %s;\n' % (self.max_size, self.ctype)
473        else:
474            result = ''
475        return result
476
477    def get_dependencies(self):
478        '''Get list of type names used by this field.'''
479        if self.allocation == 'STATIC':
480            return [str(self.ctype)]
481        else:
482            return []
483
484    def get_initializer(self, null_init, inner_init_only = False):
485        '''Return literal expression for this field's default value.
486        null_init: If True, initialize to a 0 value instead of default from .proto
487        inner_init_only: If True, exclude initialization for any count/has fields
488        '''
489
490        inner_init = None
491        if self.pbtype == 'MESSAGE':
492            if null_init:
493                inner_init = '%s_init_zero' % self.ctype
494            else:
495                inner_init = '%s_init_default' % self.ctype
496        elif self.default is None or null_init:
497            if self.pbtype == 'STRING':
498                inner_init = '""'
499            elif self.pbtype == 'BYTES':
500                inner_init = '{0, {0}}'
501            elif self.pbtype == 'FIXED_LENGTH_BYTES':
502                inner_init = '{0}'
503            elif self.pbtype in ('ENUM', 'UENUM'):
504                inner_init = '_%s_MIN' % self.ctype
505            else:
506                inner_init = '0'
507        else:
508            if self.pbtype == 'STRING':
509                data = codecs.escape_encode(self.default.encode('utf-8'))[0]
510                inner_init = '"' + data.decode('ascii') + '"'
511            elif self.pbtype == 'BYTES':
512                data = codecs.escape_decode(self.default)[0]
513                data = ["0x%02x" % c for c in bytearray(data)]
514                if len(data) == 0:
515                    inner_init = '{0, {0}}'
516                else:
517                    inner_init = '{%d, {%s}}' % (len(data), ','.join(data))
518            elif self.pbtype == 'FIXED_LENGTH_BYTES':
519                data = codecs.escape_decode(self.default)[0]
520                data = ["0x%02x" % c for c in bytearray(data)]
521                if len(data) == 0:
522                    inner_init = '{0}'
523                else:
524                    inner_init = '{%s}' % ','.join(data)
525            elif self.pbtype in ['FIXED32', 'UINT32']:
526                inner_init = str(self.default) + 'u'
527            elif self.pbtype in ['FIXED64', 'UINT64']:
528                inner_init = str(self.default) + 'ull'
529            elif self.pbtype in ['SFIXED64', 'INT64']:
530                inner_init = str(self.default) + 'll'
531            else:
532                inner_init = str(self.default)
533
534        if inner_init_only:
535            return inner_init
536
537        outer_init = None
538        if self.allocation == 'STATIC':
539            if self.rules == 'REPEATED':
540                outer_init = '0, {' + ', '.join([inner_init] * self.max_count) + '}'
541            elif self.rules == 'FIXARRAY':
542                outer_init = '{' + ', '.join([inner_init] * self.max_count) + '}'
543            elif self.rules == 'OPTIONAL':
544                outer_init = 'false, ' + inner_init
545            else:
546                outer_init = inner_init
547        elif self.allocation == 'POINTER':
548            if self.rules == 'REPEATED':
549                outer_init = '0, NULL'
550            else:
551                outer_init = 'NULL'
552        elif self.allocation == 'CALLBACK':
553            if self.pbtype == 'EXTENSION':
554                outer_init = 'NULL'
555            else:
556                outer_init = '{{NULL}, NULL}'
557
558        return outer_init
559
560    def tags(self):
561        '''Return the #define for the tag number of this field.'''
562        identifier = '%s_%s_tag' % (self.struct_name, self.name)
563        return '#define %-40s %d\n' % (identifier, self.tag)
564
565    def fieldlist(self):
566        '''Return the FIELDLIST macro entry for this field.
567        Format is: X(a, ATYPE, HTYPE, LTYPE, field_name, tag)
568        '''
569        name = self.name
570
571        if self.rules == "ONEOF":
572          # For oneofs, make a tuple of the union name, union member name,
573          # and the name inside the parent struct.
574          if not self.anonymous:
575            name = '(%s,%s,%s)' % (self.union_name, self.name, self.union_name + '.' + self.name)
576          else:
577            name = '(%s,%s,%s)' % (self.union_name, self.name, self.name)
578
579        return 'X(a, %s, %s, %s, %s, %d)' % (self.allocation, self.rules, self.pbtype, name, self.tag)
580
581    def data_size(self, dependencies):
582        '''Return estimated size of this field in the C struct.
583        This is used to try to automatically pick right descriptor size.
584        If the estimate is wrong, it will result in compile time error and
585        user having to specify descriptor_width option.
586        '''
587        if self.allocation == 'POINTER' or self.pbtype == 'EXTENSION':
588            size = 8
589        elif self.allocation == 'CALLBACK':
590            size = 16
591        elif self.pbtype == 'MESSAGE' and str(self.submsgname) in dependencies:
592            size = dependencies[str(self.submsgname)].data_size(dependencies)
593        elif self.pbtype in ['STRING', 'FIXED_LENGTH_BYTES']:
594            size = self.max_size
595        elif self.pbtype == 'BYTES':
596            size = self.max_size + 4
597        elif self.data_item_size is not None:
598            size = self.data_item_size
599        else:
600            raise Exception("Unhandled field type: %s" % self.pbtype)
601
602        if self.rules in ['REPEATED', 'FIXARRAY'] and self.allocation == 'STATIC':
603            size *= self.max_count
604
605        if self.rules not in ('REQUIRED', 'SINGULAR'):
606            size += 4
607
608        if size % 4 != 0:
609            # Estimate how much alignment requirements will increase the size.
610            size += 4 - (size % 4)
611
612        return size
613
614    def encoded_size(self, dependencies):
615        '''Return the maximum size that this field can take when encoded,
616        including the field tag. If the size cannot be determined, returns
617        None.'''
618
619        if self.allocation != 'STATIC':
620            return None
621
622        if self.pbtype == 'MESSAGE':
623            encsize = None
624            if str(self.submsgname) in dependencies:
625                submsg = dependencies[str(self.submsgname)]
626                encsize = submsg.encoded_size(dependencies)
627                if encsize is not None:
628                    # Include submessage length prefix
629                    encsize += varint_max_size(encsize.upperlimit())
630                else:
631                    my_msg = dependencies.get(str(self.struct_name))
632                    if my_msg and submsg.protofile == my_msg.protofile:
633                        # The dependency is from the same file and size cannot be
634                        # determined for it, thus we know it will not be possible
635                        # in runtime either.
636                        return None
637
638            if encsize is None:
639                # Submessage or its size cannot be found.
640                # This can occur if submessage is defined in different
641                # file, and it or its .options could not be found.
642                # Instead of direct numeric value, reference the size that
643                # has been #defined in the other file.
644                encsize = EncodedSize(self.submsgname + 'size')
645
646                # We will have to make a conservative assumption on the length
647                # prefix size, though.
648                encsize += 5
649
650        elif self.pbtype in ['ENUM', 'UENUM']:
651            if str(self.ctype) in dependencies:
652                enumtype = dependencies[str(self.ctype)]
653                encsize = enumtype.encoded_size()
654            else:
655                # Conservative assumption
656                encsize = 10
657
658        elif self.enc_size is None:
659            raise RuntimeError("Could not determine encoded size for %s.%s"
660                               % (self.struct_name, self.name))
661        else:
662            encsize = EncodedSize(self.enc_size)
663
664        encsize += varint_max_size(self.tag << 3) # Tag + wire type
665
666        if self.rules in ['REPEATED', 'FIXARRAY']:
667            # Decoders must be always able to handle unpacked arrays.
668            # Therefore we have to reserve space for it, even though
669            # we emit packed arrays ourselves. For length of 1, packed
670            # arrays are larger however so we need to add allowance
671            # for the length byte.
672            encsize *= self.max_count
673
674            if self.max_count == 1:
675                encsize += 1
676
677        return encsize
678
679    def requires_custom_field_callback(self):
680        if self.allocation == 'CALLBACK' and self.callback_datatype != 'pb_callback_t':
681            return True
682        else:
683            return False
684
685
686class ExtensionRange(Field):
687    def __init__(self, struct_name, range_start, field_options):
688        '''Implements a special pb_extension_t* field in an extensible message
689        structure. The range_start signifies the index at which the extensions
690        start. Not necessarily all tags above this are extensions, it is merely
691        a speed optimization.
692        '''
693        self.tag = range_start
694        self.struct_name = struct_name
695        self.name = 'extensions'
696        self.pbtype = 'EXTENSION'
697        self.rules = 'OPTIONAL'
698        self.allocation = 'CALLBACK'
699        self.ctype = 'pb_extension_t'
700        self.array_decl = ''
701        self.default = None
702        self.max_size = 0
703        self.max_count = 0
704        self.data_item_size = 0
705        self.fixed_count = False
706        self.callback_datatype = 'pb_extension_t*'
707
708    def requires_custom_field_callback(self):
709        return False
710
711    def __str__(self):
712        return '    pb_extension_t *extensions;'
713
714    def types(self):
715        return ''
716
717    def tags(self):
718        return ''
719
720    def encoded_size(self, dependencies):
721        # We exclude extensions from the count, because they cannot be known
722        # until runtime. Other option would be to return None here, but this
723        # way the value remains useful if extensions are not used.
724        return EncodedSize(0)
725
726class ExtensionField(Field):
727    def __init__(self, fullname, desc, field_options):
728        self.fullname = fullname
729        self.extendee_name = names_from_type_name(desc.extendee)
730        Field.__init__(self, self.fullname + "extmsg", desc, field_options)
731
732        if self.rules != 'OPTIONAL':
733            self.skip = True
734        else:
735            self.skip = False
736            self.rules = 'REQUIRED' # We don't really want the has_field for extensions
737            self.msg = Message(self.fullname + "extmsg", None, field_options)
738            self.msg.fields.append(self)
739
740    def tags(self):
741        '''Return the #define for the tag number of this field.'''
742        identifier = '%s_tag' % self.fullname
743        return '#define %-40s %d\n' % (identifier, self.tag)
744
745    def extension_decl(self):
746        '''Declaration of the extension type in the .pb.h file'''
747        if self.skip:
748            msg = '/* Extension field %s was skipped because only "optional"\n' % self.fullname
749            msg +='   type of extension fields is currently supported. */\n'
750            return msg
751
752        return ('extern const pb_extension_type_t %s; /* field type: %s */\n' %
753            (self.fullname, str(self).strip()))
754
755    def extension_def(self, dependencies):
756        '''Definition of the extension type in the .pb.c file'''
757
758        if self.skip:
759            return ''
760
761        result = "/* Definition for extension field %s */\n" % self.fullname
762        result += str(self.msg)
763        result += self.msg.fields_declaration(dependencies)
764        result += 'pb_byte_t %s_default[] = {0x00};\n' % self.msg.name
765        result += self.msg.fields_definition(dependencies)
766        result += 'const pb_extension_type_t %s = {\n' % self.fullname
767        result += '    NULL,\n'
768        result += '    NULL,\n'
769        result += '    &%s_msg\n' % self.msg.name
770        result += '};\n'
771        return result
772
773
774# ---------------------------------------------------------------------------
775#                   Generation of oneofs (unions)
776# ---------------------------------------------------------------------------
777
778class OneOf(Field):
779    def __init__(self, struct_name, oneof_desc):
780        self.struct_name = struct_name
781        self.name = oneof_desc.name
782        self.ctype = 'union'
783        self.pbtype = 'oneof'
784        self.fields = []
785        self.allocation = 'ONEOF'
786        self.default = None
787        self.rules = 'ONEOF'
788        self.anonymous = False
789
790    def add_field(self, field):
791        if field.allocation == 'CALLBACK':
792            raise Exception("Callback fields inside of oneof are not supported"
793                            + " (field %s)" % field.name)
794
795        field.union_name = self.name
796        field.rules = 'ONEOF'
797        field.anonymous = self.anonymous
798        self.fields.append(field)
799        self.fields.sort(key = lambda f: f.tag)
800
801        # Sort by the lowest tag number inside union
802        self.tag = min([f.tag for f in self.fields])
803
804    def __str__(self):
805        result = ''
806        if self.fields:
807            result += '    pb_size_t which_' + self.name + ";\n"
808            result += '    union {\n'
809            for f in self.fields:
810                result += '    ' + str(f).replace('\n', '\n    ') + '\n'
811            if self.anonymous:
812                result += '    };'
813            else:
814                result += '    } ' + self.name + ';'
815        return result
816
817    def types(self):
818        return ''.join([f.types() for f in self.fields])
819
820    def get_dependencies(self):
821        deps = []
822        for f in self.fields:
823            deps += f.get_dependencies()
824        return deps
825
826    def get_initializer(self, null_init):
827        return '0, {' + self.fields[0].get_initializer(null_init) + '}'
828
829    def tags(self):
830        return ''.join([f.tags() for f in self.fields])
831
832    def fieldlist(self):
833        return ' \\\n'.join(field.fieldlist() for field in self.fields)
834
835    def data_size(self, dependencies):
836        return max(f.data_size(dependencies) for f in self.fields)
837
838    def encoded_size(self, dependencies):
839        '''Returns the size of the largest oneof field.'''
840        largest = 0
841        symbols = []
842        for f in self.fields:
843            size = EncodedSize(f.encoded_size(dependencies))
844            if size is None or size.value is None:
845                return None
846            elif size.symbols:
847                symbols.append((f.tag, size.symbols[0]))
848            elif size.value > largest:
849                largest = size.value
850
851        if not symbols:
852            # Simple case, all sizes were known at generator time
853            return largest
854
855        if largest > 0:
856            # Some sizes were known, some were not
857            symbols.insert(0, (0, largest))
858
859        if len(symbols) == 1:
860            # Only one symbol was needed
861            return EncodedSize(5, [symbols[0][1]])
862        else:
863            # Use sizeof(union{}) construct to find the maximum size of
864            # submessages.
865            union_def = ' '.join('char f%d[%s];' % s for s in symbols)
866            return EncodedSize(5, ['sizeof(union{%s})' % union_def])
867
868# ---------------------------------------------------------------------------
869#                   Generation of messages (structures)
870# ---------------------------------------------------------------------------
871
872
873class Message:
874    def __init__(self, names, desc, message_options):
875        self.name = names
876        self.fields = []
877        self.oneofs = {}
878        self.desc = desc
879
880        if message_options.msgid:
881            self.msgid = message_options.msgid
882
883        if desc is not None:
884            self.load_fields(desc, message_options)
885
886        self.callback_function = message_options.callback_function
887        if not message_options.HasField('callback_function'):
888            # Automatically assign a per-message callback if any field has
889            # a special callback_datatype.
890            for field in self.fields:
891                if field.requires_custom_field_callback():
892                    self.callback_function = "%s_callback" % self.name
893                    break
894
895        self.packed = message_options.packed_struct
896        self.descriptorsize = message_options.descriptorsize
897
898    def load_fields(self, desc, message_options):
899        '''Load field list from DescriptorProto'''
900
901        no_unions = []
902
903        if hasattr(desc, 'oneof_decl'):
904            for i, f in enumerate(desc.oneof_decl):
905                oneof_options = get_nanopb_suboptions(desc, message_options, self.name + f.name)
906                if oneof_options.no_unions:
907                    no_unions.append(i) # No union, but add fields normally
908                elif oneof_options.type == nanopb_pb2.FT_IGNORE:
909                    pass # No union and skip fields also
910                else:
911                    oneof = OneOf(self.name, f)
912                    if oneof_options.anonymous_oneof:
913                        oneof.anonymous = True
914                    self.oneofs[i] = oneof
915                    self.fields.append(oneof)
916        else:
917            sys.stderr.write('Note: This Python protobuf library has no OneOf support\n')
918
919        for f in desc.field:
920            field_options = get_nanopb_suboptions(f, message_options, self.name + f.name)
921            if field_options.type == nanopb_pb2.FT_IGNORE:
922                continue
923
924            field = Field(self.name, f, field_options)
925            if (hasattr(f, 'oneof_index') and
926                f.HasField('oneof_index') and
927                f.oneof_index not in no_unions):
928                if f.oneof_index in self.oneofs:
929                    self.oneofs[f.oneof_index].add_field(field)
930            else:
931                self.fields.append(field)
932
933        if len(desc.extension_range) > 0:
934            field_options = get_nanopb_suboptions(desc, message_options, self.name + 'extensions')
935            range_start = min([r.start for r in desc.extension_range])
936            if field_options.type != nanopb_pb2.FT_IGNORE:
937                self.fields.append(ExtensionRange(self.name, range_start, field_options))
938
939    def get_dependencies(self):
940        '''Get list of type names that this structure refers to.'''
941        deps = []
942        for f in self.fields:
943            deps += f.get_dependencies()
944        return deps
945
946    def __str__(self):
947        result = 'typedef struct _%s {\n' % self.name
948
949        if not self.fields:
950            # Empty structs are not allowed in C standard.
951            # Therefore add a dummy field if an empty message occurs.
952            result += '    char dummy_field;'
953
954        result += '\n'.join([str(f) for f in sorted(self.fields)])
955        result += '\n/* @@protoc_insertion_point(struct:%s) */' % self.name
956        result += '\n}'
957
958        if self.packed:
959            result += ' pb_packed'
960
961        result += ' %s;' % self.name
962
963        if self.packed:
964            result = 'PB_PACKED_STRUCT_START\n' + result
965            result += '\nPB_PACKED_STRUCT_END'
966
967        return result + '\n'
968
969    def types(self):
970        return ''.join([f.types() for f in self.fields])
971
972    def get_initializer(self, null_init):
973        if not self.fields:
974            return '{0}'
975
976        parts = []
977        for field in sorted(self.fields):
978            parts.append(field.get_initializer(null_init))
979        return '{' + ', '.join(parts) + '}'
980
981    def count_required_fields(self):
982        '''Returns number of required fields inside this message'''
983        count = 0
984        for f in self.fields:
985            if not isinstance(f, OneOf):
986                if f.rules == 'REQUIRED':
987                    count += 1
988        return count
989
990    def all_fields(self):
991        '''Iterate over all fields in this message, including nested OneOfs.'''
992        for f in self.fields:
993            if isinstance(f, OneOf):
994                for f2 in f.fields:
995                    yield f2
996            else:
997                yield f
998
999
1000    def field_for_tag(self, tag):
1001        '''Given a tag number, return the Field instance.'''
1002        for field in self.all_fields():
1003            if field.tag == tag:
1004                return field
1005        return None
1006
1007    def count_all_fields(self):
1008        '''Count the total number of fields in this message.'''
1009        count = 0
1010        for f in self.fields:
1011            if isinstance(f, OneOf):
1012                count += len(f.fields)
1013            else:
1014                count += 1
1015        return count
1016
1017    def fields_declaration(self, dependencies):
1018        '''Return X-macro declaration of all fields in this message.'''
1019        result = '#define %s_FIELDLIST(X, a) \\\n' % (self.name)
1020        result += ' \\\n'.join(field.fieldlist() for field in sorted(self.fields))
1021        result += '\n'
1022
1023        has_callbacks = bool([f for f in self.fields if f.allocation == 'CALLBACK'])
1024        if has_callbacks:
1025            if self.callback_function != 'pb_default_field_callback':
1026                result += "extern bool %s(pb_istream_t *istream, pb_ostream_t *ostream, const pb_field_t *field);\n" % self.callback_function
1027            result += "#define %s_CALLBACK %s\n" % (self.name, self.callback_function)
1028        else:
1029            result += "#define %s_CALLBACK NULL\n" % self.name
1030
1031        defval = self.default_value(dependencies)
1032        if defval:
1033            hexcoded = ''.join("\\x%02x" % ord(defval[i:i+1]) for i in range(len(defval)))
1034            result += '#define %s_DEFAULT (const uint8_t*)"%s\\x00"\n' % (self.name, hexcoded)
1035        else:
1036            result += '#define %s_DEFAULT NULL\n' % self.name
1037
1038        for field in sorted(self.fields):
1039            if field.pbtype == 'MESSAGE':
1040                result += "#define %s_%s_MSGTYPE %s\n" % (self.name, field.name, field.ctype)
1041            elif field.rules == 'ONEOF':
1042                for member in field.fields:
1043                    if member.pbtype == 'MESSAGE':
1044                        result += "#define %s_%s_%s_MSGTYPE %s\n" % (self.name, member.union_name, member.name, member.ctype)
1045
1046        return result
1047
1048    def fields_declaration_cpp_lookup(self):
1049        result = 'template <>\n'
1050        result += 'struct MessageDescriptor<%s> {\n' % (self.name)
1051        result += '    static PB_INLINE_CONSTEXPR const pb_size_t fields_array_length = %d;\n' % (self.count_all_fields())
1052        result += '    static inline const pb_msgdesc_t* fields() {\n'
1053        result += '        return &%s_msg;\n' % (self.name)
1054        result += '    }\n'
1055        result += '};'
1056        return result
1057
1058    def fields_definition(self, dependencies):
1059        '''Return the field descriptor definition that goes in .pb.c file.'''
1060        width = self.required_descriptor_width(dependencies)
1061        if width == 1:
1062          width = 'AUTO'
1063
1064        result = 'PB_BIND(%s, %s, %s)\n' % (self.name, self.name, width)
1065        return result
1066
1067    def required_descriptor_width(self, dependencies):
1068        '''Estimate how many words are necessary for each field descriptor.'''
1069        if self.descriptorsize != nanopb_pb2.DS_AUTO:
1070            return int(self.descriptorsize)
1071
1072        if not self.fields:
1073          return 1
1074
1075        max_tag = max(field.tag for field in self.all_fields())
1076        max_offset = self.data_size(dependencies)
1077        max_arraysize = max((field.max_count or 0) for field in self.all_fields())
1078        max_datasize = max(field.data_size(dependencies) for field in self.all_fields())
1079
1080        if max_arraysize > 0xFFFF:
1081            return 8
1082        elif (max_tag > 0x3FF or max_offset > 0xFFFF or
1083              max_arraysize > 0x0FFF or max_datasize > 0x0FFF):
1084            return 4
1085        elif max_tag > 0x3F or max_offset > 0xFF:
1086            return 2
1087        else:
1088            # NOTE: Macro logic in pb.h ensures that width 1 will
1089            # be raised to 2 automatically for string/submsg fields
1090            # and repeated fields. Thus only tag and offset need to
1091            # be checked.
1092            return 1
1093
1094    def data_size(self, dependencies):
1095        '''Return approximate sizeof(struct) in the compiled code.'''
1096        return sum(f.data_size(dependencies) for f in self.fields)
1097
1098    def encoded_size(self, dependencies):
1099        '''Return the maximum size that this message can take when encoded.
1100        If the size cannot be determined, returns None.
1101        '''
1102        size = EncodedSize(0)
1103        for field in self.fields:
1104            fsize = field.encoded_size(dependencies)
1105            if fsize is None:
1106                return None
1107            size += fsize
1108
1109        return size
1110
1111    def default_value(self, dependencies):
1112        '''Generate serialized protobuf message that contains the
1113        default values for optional fields.'''
1114
1115        if not self.desc:
1116            return b''
1117
1118        if self.desc.options.map_entry:
1119            return b''
1120
1121        optional_only = copy.deepcopy(self.desc)
1122        enums = []
1123
1124        # Remove fields without default values
1125        # The iteration is done in reverse order to avoid remove() messing up iteration.
1126        for field in reversed(list(optional_only.field)):
1127            parsed_field = self.field_for_tag(field.number)
1128            if parsed_field is None or parsed_field.allocation != 'STATIC':
1129                optional_only.field.remove(field)
1130            elif (field.label == FieldD.LABEL_REPEATED or
1131                  field.type == FieldD.TYPE_MESSAGE or
1132                  not field.HasField('default_value')):
1133                optional_only.field.remove(field)
1134            elif hasattr(field, 'oneof_index') and field.HasField('oneof_index'):
1135                optional_only.field.remove(field)
1136            elif field.type == FieldD.TYPE_ENUM:
1137                # The partial descriptor doesn't include the enum type
1138                # so we fake it with int64.
1139                enums.append(field.name)
1140                field.type = FieldD.TYPE_INT64
1141
1142        if len(optional_only.field) == 0:
1143            return b''
1144
1145        optional_only.ClearField(str('oneof_decl'))
1146        desc = google.protobuf.descriptor.MakeDescriptor(optional_only)
1147        msg = reflection.MakeClass(desc)()
1148
1149        for field in optional_only.field:
1150            if field.type == FieldD.TYPE_STRING:
1151                setattr(msg, field.name, field.default_value)
1152            elif field.type == FieldD.TYPE_BYTES:
1153                setattr(msg, field.name, codecs.escape_decode(field.default_value)[0])
1154            elif field.type in [FieldD.TYPE_FLOAT, FieldD.TYPE_DOUBLE]:
1155                setattr(msg, field.name, float(field.default_value))
1156            elif field.type == FieldD.TYPE_BOOL:
1157                setattr(msg, field.name, field.default_value == 'true')
1158            elif field.name in enums:
1159                # Lookup the enum default value
1160                enumname = names_from_type_name(field.type_name)
1161                enumtype = dependencies[str(enumname)]
1162                defvals = [v for n,v in enumtype.values if n.parts[-1] == field.default_value]
1163                if defvals:
1164                    setattr(msg, field.name, defvals[0])
1165            else:
1166                setattr(msg, field.name, int(field.default_value))
1167
1168        return msg.SerializeToString()
1169
1170
1171# ---------------------------------------------------------------------------
1172#                    Processing of entire .proto files
1173# ---------------------------------------------------------------------------
1174
1175def iterate_messages(desc, flatten = False, names = Names()):
1176    '''Recursively find all messages. For each, yield name, DescriptorProto.'''
1177    if hasattr(desc, 'message_type'):
1178        submsgs = desc.message_type
1179    else:
1180        submsgs = desc.nested_type
1181
1182    for submsg in submsgs:
1183        sub_names = names + submsg.name
1184        if flatten:
1185            yield Names(submsg.name), submsg
1186        else:
1187            yield sub_names, submsg
1188
1189        for x in iterate_messages(submsg, flatten, sub_names):
1190            yield x
1191
1192def iterate_extensions(desc, flatten = False, names = Names()):
1193    '''Recursively find all extensions.
1194    For each, yield name, FieldDescriptorProto.
1195    '''
1196    for extension in desc.extension:
1197        yield names, extension
1198
1199    for subname, subdesc in iterate_messages(desc, flatten, names):
1200        for extension in subdesc.extension:
1201            yield subname, extension
1202
1203def toposort2(data):
1204    '''Topological sort.
1205    From http://code.activestate.com/recipes/577413-topological-sort/
1206    This function is under the MIT license.
1207    '''
1208    for k, v in list(data.items()):
1209        v.discard(k) # Ignore self dependencies
1210    extra_items_in_deps = reduce(set.union, list(data.values()), set()) - set(data.keys())
1211    data.update(dict([(item, set()) for item in extra_items_in_deps]))
1212    while True:
1213        ordered = set(item for item,dep in list(data.items()) if not dep)
1214        if not ordered:
1215            break
1216        for item in sorted(ordered):
1217            yield item
1218        data = dict([(item, (dep - ordered)) for item,dep in list(data.items())
1219                if item not in ordered])
1220    assert not data, "A cyclic dependency exists amongst %r" % data
1221
1222def sort_dependencies(messages):
1223    '''Sort a list of Messages based on dependencies.'''
1224    dependencies = {}
1225    message_by_name = {}
1226    for message in messages:
1227        dependencies[str(message.name)] = set(message.get_dependencies())
1228        message_by_name[str(message.name)] = message
1229
1230    for msgname in toposort2(dependencies):
1231        if msgname in message_by_name:
1232            yield message_by_name[msgname]
1233
1234def make_identifier(headername):
1235    '''Make #ifndef identifier that contains uppercase A-Z and digits 0-9'''
1236    result = ""
1237    for c in headername.upper():
1238        if c.isalnum():
1239            result += c
1240        else:
1241            result += '_'
1242    return result
1243
1244class ProtoFile:
1245    def __init__(self, fdesc, file_options):
1246        '''Takes a FileDescriptorProto and parses it.'''
1247        self.fdesc = fdesc
1248        self.file_options = file_options
1249        self.dependencies = {}
1250        self.parse()
1251
1252        # Some of types used in this file probably come from the file itself.
1253        # Thus it has implicit dependency on itself.
1254        self.add_dependency(self)
1255
1256    def parse(self):
1257        self.enums = []
1258        self.messages = []
1259        self.extensions = []
1260
1261        mangle_names = self.file_options.mangle_names
1262        flatten = mangle_names == nanopb_pb2.M_FLATTEN
1263        strip_prefix = None
1264        replacement_prefix = None
1265        if mangle_names == nanopb_pb2.M_STRIP_PACKAGE:
1266            strip_prefix = "." + self.fdesc.package
1267        elif mangle_names == nanopb_pb2.M_PACKAGE_INITIALS:
1268            strip_prefix = "." + self.fdesc.package
1269            replacement_prefix = ""
1270            for part in self.fdesc.package.split("."):
1271                replacement_prefix += part[0]
1272
1273        def create_name(names):
1274            if mangle_names == nanopb_pb2.M_NONE or mangle_names == nanopb_pb2.M_PACKAGE_INITIALS:
1275                return base_name + names
1276            elif mangle_names == nanopb_pb2.M_STRIP_PACKAGE:
1277                return Names(names)
1278            else:
1279                single_name = names
1280                if isinstance(names, Names):
1281                    single_name = names.parts[-1]
1282                return Names(single_name)
1283
1284        def mangle_field_typename(typename):
1285            if mangle_names == nanopb_pb2.M_FLATTEN:
1286                return "." + typename.split(".")[-1]
1287            elif strip_prefix is not None and typename.startswith(strip_prefix):
1288                if replacement_prefix is not None:
1289                    return "." + replacement_prefix + typename[len(strip_prefix):]
1290                else:
1291                    return typename[len(strip_prefix):]
1292            else:
1293                return typename
1294
1295        if self.fdesc.package:
1296            if replacement_prefix is not None:
1297                base_name = Names(replacement_prefix)
1298            else:
1299                base_name = Names(self.fdesc.package.split('.'))
1300        else:
1301            base_name = Names()
1302
1303        for enum in self.fdesc.enum_type:
1304            name = create_name(enum.name)
1305            enum_options = get_nanopb_suboptions(enum, self.file_options, name)
1306            self.enums.append(Enum(name, enum, enum_options))
1307
1308        for names, message in iterate_messages(self.fdesc, flatten):
1309            name = create_name(names)
1310            message_options = get_nanopb_suboptions(message, self.file_options, name)
1311
1312            if message_options.skip_message:
1313                continue
1314
1315            message = copy.deepcopy(message)
1316            for field in message.field:
1317                if field.type in (FieldD.TYPE_MESSAGE, FieldD.TYPE_ENUM):
1318                    field.type_name = mangle_field_typename(field.type_name)
1319
1320            self.messages.append(Message(name, message, message_options))
1321            for enum in message.enum_type:
1322                name = create_name(names + enum.name)
1323                enum_options = get_nanopb_suboptions(enum, message_options, name)
1324                self.enums.append(Enum(name, enum, enum_options))
1325
1326        for names, extension in iterate_extensions(self.fdesc, flatten):
1327            name = create_name(names + extension.name)
1328            field_options = get_nanopb_suboptions(extension, self.file_options, name)
1329            if field_options.type != nanopb_pb2.FT_IGNORE:
1330                self.extensions.append(ExtensionField(name, extension, field_options))
1331
1332    def add_dependency(self, other):
1333        for enum in other.enums:
1334            self.dependencies[str(enum.names)] = enum
1335            enum.protofile = other
1336
1337        for msg in other.messages:
1338            self.dependencies[str(msg.name)] = msg
1339            msg.protofile = other
1340
1341        # Fix field default values where enum short names are used.
1342        for enum in other.enums:
1343            if not enum.options.long_names:
1344                for message in self.messages:
1345                    for field in message.fields:
1346                        if field.default in enum.value_longnames:
1347                            idx = enum.value_longnames.index(field.default)
1348                            field.default = enum.values[idx][0]
1349
1350        # Fix field data types where enums have negative values.
1351        for enum in other.enums:
1352            if not enum.has_negative():
1353                for message in self.messages:
1354                    for field in message.fields:
1355                        if field.pbtype == 'ENUM' and field.ctype == enum.names:
1356                            field.pbtype = 'UENUM'
1357
1358    def generate_header(self, includes, headername, options):
1359        '''Generate content for a header file.
1360        Generates strings, which should be concatenated and stored to file.
1361        '''
1362
1363        yield '/* Automatically generated nanopb header */\n'
1364        if options.notimestamp:
1365            yield '/* Generated by %s */\n\n' % (nanopb_version)
1366        else:
1367            yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime())
1368
1369        if self.fdesc.package:
1370            symbol = make_identifier(self.fdesc.package + '_' + headername)
1371        else:
1372            symbol = make_identifier(headername)
1373        yield '#ifndef PB_%s_INCLUDED\n' % symbol
1374        yield '#define PB_%s_INCLUDED\n' % symbol
1375        try:
1376            yield options.libformat % ('pb.h')
1377        except TypeError:
1378            # no %s specified - use whatever was passed in as options.libformat
1379            yield options.libformat
1380        yield '\n'
1381
1382        for incfile in includes:
1383            noext = os.path.splitext(incfile)[0]
1384            yield options.genformat % (noext + options.extension + options.header_extension)
1385            yield '\n'
1386
1387        yield '/* @@protoc_insertion_point(includes) */\n'
1388
1389        yield '#if PB_PROTO_HEADER_VERSION != 40\n'
1390        yield '#error Regenerate this file with the current version of nanopb generator.\n'
1391        yield '#endif\n'
1392        yield '\n'
1393
1394        yield '#ifdef __cplusplus\n'
1395        yield 'extern "C" {\n'
1396        yield '#endif\n\n'
1397
1398        if self.enums:
1399            yield '/* Enum definitions */\n'
1400            for enum in self.enums:
1401                yield str(enum) + '\n\n'
1402
1403        if self.messages:
1404            yield '/* Struct definitions */\n'
1405            for msg in sort_dependencies(self.messages):
1406                yield msg.types()
1407                yield str(msg) + '\n\n'
1408
1409        if self.extensions:
1410            yield '/* Extensions */\n'
1411            for extension in self.extensions:
1412                yield extension.extension_decl()
1413            yield '\n'
1414
1415        if self.messages:
1416            yield '/* Initializer values for message structs */\n'
1417            for msg in self.messages:
1418                identifier = '%s_init_default' % msg.name
1419                yield '#define %-40s %s\n' % (identifier, msg.get_initializer(False))
1420            for msg in self.messages:
1421                identifier = '%s_init_zero' % msg.name
1422                yield '#define %-40s %s\n' % (identifier, msg.get_initializer(True))
1423            yield '\n'
1424
1425            yield '/* Field tags (for use in manual encoding/decoding) */\n'
1426            for msg in sort_dependencies(self.messages):
1427                for field in msg.fields:
1428                    yield field.tags()
1429            for extension in self.extensions:
1430                yield extension.tags()
1431            yield '\n'
1432
1433            yield '/* Struct field encoding specification for nanopb */\n'
1434            for msg in self.messages:
1435                yield msg.fields_declaration(self.dependencies) + '\n'
1436            for msg in self.messages:
1437                yield 'extern const pb_msgdesc_t %s_msg;\n' % msg.name
1438            yield '\n'
1439
1440            yield '/* Defines for backwards compatibility with code written before nanopb-0.4.0 */\n'
1441            for msg in self.messages:
1442              yield '#define %s_fields &%s_msg\n' % (msg.name, msg.name)
1443            yield '\n'
1444
1445            yield '/* Maximum encoded size of messages (where known) */\n'
1446            for msg in self.messages:
1447                msize = msg.encoded_size(self.dependencies)
1448                identifier = '%s_size' % msg.name
1449                if msize is not None:
1450                    yield '#define %-40s %s\n' % (identifier, msize)
1451                else:
1452                    yield '/* %s depends on runtime parameters */\n' % identifier
1453            yield '\n'
1454
1455            if [msg for msg in self.messages if hasattr(msg,'msgid')]:
1456              yield '/* Message IDs (where set with "msgid" option) */\n'
1457              yield '#ifdef PB_MSGID\n'
1458              for msg in self.messages:
1459                  if hasattr(msg,'msgid'):
1460                      yield '#define PB_MSG_%d %s\n' % (msg.msgid, msg.name)
1461              yield '\n'
1462
1463              symbol = make_identifier(headername.split('.')[0])
1464              yield '#define %s_MESSAGES \\\n' % symbol
1465
1466              for msg in self.messages:
1467                  m = "-1"
1468                  msize = msg.encoded_size(self.dependencies)
1469                  if msize is not None:
1470                      m = msize
1471                  if hasattr(msg,'msgid'):
1472                      yield '\tPB_MSG(%d,%s,%s) \\\n' % (msg.msgid, m, msg.name)
1473              yield '\n'
1474
1475              for msg in self.messages:
1476                  if hasattr(msg,'msgid'):
1477                      yield '#define %s_msgid %d\n' % (msg.name, msg.msgid)
1478              yield '\n'
1479              yield '#endif\n\n'
1480
1481        yield '#ifdef __cplusplus\n'
1482        yield '} /* extern "C" */\n'
1483        yield '#endif\n'
1484
1485        if options.cpp_descriptors:
1486            yield '\n'
1487            yield '#ifdef __cplusplus\n'
1488            yield '/* Message descriptors for nanopb */\n'
1489            yield 'namespace nanopb {\n'
1490            for msg in self.messages:
1491                yield msg.fields_declaration_cpp_lookup() + '\n'
1492            yield '}  // namespace nanopb\n'
1493            yield '\n'
1494            yield '#endif  /* __cplusplus */\n'
1495            yield '\n'
1496
1497        # End of header
1498        yield '/* @@protoc_insertion_point(eof) */\n'
1499        yield '\n#endif\n'
1500
1501    def generate_source(self, headername, options):
1502        '''Generate content for a source file.'''
1503
1504        yield '/* Automatically generated nanopb constant definitions */\n'
1505        if options.notimestamp:
1506            yield '/* Generated by %s */\n\n' % (nanopb_version)
1507        else:
1508            yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime())
1509        yield options.genformat % (headername)
1510        yield '\n'
1511        yield '/* @@protoc_insertion_point(includes) */\n'
1512
1513        yield '#if PB_PROTO_HEADER_VERSION != 40\n'
1514        yield '#error Regenerate this file with the current version of nanopb generator.\n'
1515        yield '#endif\n'
1516        yield '\n'
1517
1518        for msg in self.messages:
1519            yield msg.fields_definition(self.dependencies) + '\n\n'
1520
1521        for ext in self.extensions:
1522            yield ext.extension_def(self.dependencies) + '\n'
1523
1524        for enum in self.enums:
1525            yield enum.enum_to_string_definition() + '\n'
1526
1527        # Add checks for numeric limits
1528        if self.messages:
1529            largest_msg = max(self.messages, key = lambda m: m.count_required_fields())
1530            largest_count = largest_msg.count_required_fields()
1531            if largest_count > 64:
1532                yield '\n/* Check that missing required fields will be properly detected */\n'
1533                yield '#if PB_MAX_REQUIRED_FIELDS < %d\n' % largest_count
1534                yield '#error Properly detecting missing required fields in %s requires \\\n' % largest_msg.name
1535                yield '       setting PB_MAX_REQUIRED_FIELDS to %d or more.\n' % largest_count
1536                yield '#endif\n'
1537
1538        # Add check for sizeof(double)
1539        has_double = False
1540        for msg in self.messages:
1541            for field in msg.fields:
1542                if field.ctype == 'double':
1543                    has_double = True
1544
1545        if has_double:
1546            yield '\n'
1547            yield '/* On some platforms (such as AVR), double is really float.\n'
1548            yield ' * Using double on these platforms is not directly supported\n'
1549            yield ' * by nanopb, but see example_avr_double.\n'
1550            yield ' * To get rid of this error, remove any double fields from your .proto.\n'
1551            yield ' */\n'
1552            yield 'PB_STATIC_ASSERT(sizeof(double) == 8, DOUBLE_MUST_BE_8_BYTES)\n'
1553
1554        yield '\n'
1555        yield '/* @@protoc_insertion_point(eof) */\n'
1556
1557# ---------------------------------------------------------------------------
1558#                    Options parsing for the .proto files
1559# ---------------------------------------------------------------------------
1560
1561from fnmatch import fnmatchcase
1562
1563def read_options_file(infile):
1564    '''Parse a separate options file to list:
1565        [(namemask, options), ...]
1566    '''
1567    results = []
1568    data = infile.read()
1569    data = re.sub('/\*.*?\*/', '', data, flags = re.MULTILINE)
1570    data = re.sub('//.*?$', '', data, flags = re.MULTILINE)
1571    data = re.sub('#.*?$', '', data, flags = re.MULTILINE)
1572    for i, line in enumerate(data.split('\n')):
1573        line = line.strip()
1574        if not line:
1575            continue
1576
1577        parts = line.split(None, 1)
1578
1579        if len(parts) < 2:
1580            sys.stderr.write("%s:%d: " % (infile.name, i + 1) +
1581                             "Option lines should have space between field name and options. " +
1582                             "Skipping line: '%s'\n" % line)
1583            continue
1584
1585        opts = nanopb_pb2.NanoPBOptions()
1586
1587        try:
1588            text_format.Merge(parts[1], opts)
1589        except Exception as e:
1590            sys.stderr.write("%s:%d: " % (infile.name, i + 1) +
1591                             "Unparseable option line: '%s'. " % line +
1592                             "Error: %s\n" % str(e))
1593            continue
1594        results.append((parts[0], opts))
1595
1596    return results
1597
1598class Globals:
1599    '''Ugly global variables, should find a good way to pass these.'''
1600    verbose_options = False
1601    separate_options = []
1602    matched_namemasks = set()
1603
1604def get_nanopb_suboptions(subdesc, options, name):
1605    '''Get copy of options, and merge information from subdesc.'''
1606    new_options = nanopb_pb2.NanoPBOptions()
1607    new_options.CopyFrom(options)
1608
1609    if hasattr(subdesc, 'syntax') and subdesc.syntax == "proto3":
1610        new_options.proto3 = True
1611
1612    # Handle options defined in a separate file
1613    dotname = '.'.join(name.parts)
1614    for namemask, options in Globals.separate_options:
1615        if fnmatchcase(dotname, namemask):
1616            Globals.matched_namemasks.add(namemask)
1617            new_options.MergeFrom(options)
1618
1619    # Handle options defined in .proto
1620    if isinstance(subdesc.options, descriptor.FieldOptions):
1621        ext_type = nanopb_pb2.nanopb
1622    elif isinstance(subdesc.options, descriptor.FileOptions):
1623        ext_type = nanopb_pb2.nanopb_fileopt
1624    elif isinstance(subdesc.options, descriptor.MessageOptions):
1625        ext_type = nanopb_pb2.nanopb_msgopt
1626    elif isinstance(subdesc.options, descriptor.EnumOptions):
1627        ext_type = nanopb_pb2.nanopb_enumopt
1628    else:
1629        raise Exception("Unknown options type")
1630
1631    if subdesc.options.HasExtension(ext_type):
1632        ext = subdesc.options.Extensions[ext_type]
1633        new_options.MergeFrom(ext)
1634
1635    if Globals.verbose_options:
1636        sys.stderr.write("Options for " + dotname + ": ")
1637        sys.stderr.write(text_format.MessageToString(new_options) + "\n")
1638
1639    return new_options
1640
1641
1642# ---------------------------------------------------------------------------
1643#                         Command line interface
1644# ---------------------------------------------------------------------------
1645
1646import sys
1647import os.path
1648from optparse import OptionParser
1649
1650optparser = OptionParser(
1651    usage = "Usage: nanopb_generator.py [options] file.pb ...",
1652    epilog = "Compile file.pb from file.proto by: 'protoc -ofile.pb file.proto'. " +
1653             "Output will be written to file.pb.h and file.pb.c.")
1654optparser.add_option("-x", dest="exclude", metavar="FILE", action="append", default=[],
1655    help="Exclude file from generated #include list.")
1656optparser.add_option("-e", "--extension", dest="extension", metavar="EXTENSION", default=".pb",
1657    help="Set extension to use instead of '.pb' for generated files. [default: %default]")
1658optparser.add_option("-H", "--header-extension", dest="header_extension", metavar="EXTENSION", default=".h",
1659    help="Set extension to use for generated header files. [default: %default]")
1660optparser.add_option("-S", "--source-extension", dest="source_extension", metavar="EXTENSION", default=".c",
1661    help="Set extension to use for generated source files. [default: %default]")
1662optparser.add_option("-f", "--options-file", dest="options_file", metavar="FILE", default="%s.options",
1663    help="Set name of a separate generator options file.")
1664optparser.add_option("-I", "--options-path", dest="options_path", metavar="DIR",
1665    action="append", default = [],
1666    help="Search for .options files additionally in this path")
1667optparser.add_option("-D", "--output-dir", dest="output_dir",
1668                     metavar="OUTPUTDIR", default=None,
1669                     help="Output directory of .pb.h and .pb.c files")
1670optparser.add_option("-Q", "--generated-include-format", dest="genformat",
1671    metavar="FORMAT", default='#include "%s"\n',
1672    help="Set format string to use for including other .pb.h files. [default: %default]")
1673optparser.add_option("-L", "--library-include-format", dest="libformat",
1674    metavar="FORMAT", default='#include <%s>\n',
1675    help="Set format string to use for including the nanopb pb.h header. [default: %default]")
1676optparser.add_option("--strip-path", dest="strip_path", action="store_true", default=False,
1677    help="Strip directory path from #included .pb.h file name")
1678optparser.add_option("--no-strip-path", dest="strip_path", action="store_false",
1679    help="Opposite of --strip-path (default since 0.4.0)")
1680optparser.add_option("--cpp-descriptors", action="store_true",
1681    help="Generate C++ descriptors to lookup by type (e.g. pb_field_t for a message)")
1682optparser.add_option("-T", "--no-timestamp", dest="notimestamp", action="store_true", default=True,
1683    help="Don't add timestamp to .pb.h and .pb.c preambles (default since 0.4.0)")
1684optparser.add_option("-t", "--timestamp", dest="notimestamp", action="store_false", default=True,
1685    help="Add timestamp to .pb.h and .pb.c preambles")
1686optparser.add_option("-q", "--quiet", dest="quiet", action="store_true", default=False,
1687    help="Don't print anything except errors.")
1688optparser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
1689    help="Print more information.")
1690optparser.add_option("-s", dest="settings", metavar="OPTION:VALUE", action="append", default=[],
1691    help="Set generator option (max_size, max_count etc.).")
1692
1693def parse_file(filename, fdesc, options):
1694    '''Parse a single file. Returns a ProtoFile instance.'''
1695    toplevel_options = nanopb_pb2.NanoPBOptions()
1696    for s in options.settings:
1697        text_format.Merge(s, toplevel_options)
1698
1699    if not fdesc:
1700        data = open(filename, 'rb').read()
1701        fdesc = descriptor.FileDescriptorSet.FromString(data).file[0]
1702
1703    # Check if there is a separate .options file
1704    had_abspath = False
1705    try:
1706        optfilename = options.options_file % os.path.splitext(filename)[0]
1707    except TypeError:
1708        # No %s specified, use the filename as-is
1709        optfilename = options.options_file
1710        had_abspath = True
1711
1712    paths = ['.'] + options.options_path
1713    for p in paths:
1714        if os.path.isfile(os.path.join(p, optfilename)):
1715            optfilename = os.path.join(p, optfilename)
1716            if options.verbose:
1717                sys.stderr.write('Reading options from ' + optfilename + '\n')
1718            Globals.separate_options = read_options_file(open(optfilename, "rU"))
1719            break
1720    else:
1721        # If we are given a full filename and it does not exist, give an error.
1722        # However, don't give error when we automatically look for .options file
1723        # with the same name as .proto.
1724        if options.verbose or had_abspath:
1725            sys.stderr.write('Options file not found: ' + optfilename + '\n')
1726        Globals.separate_options = []
1727
1728    Globals.matched_namemasks = set()
1729
1730    # Parse the file
1731    file_options = get_nanopb_suboptions(fdesc, toplevel_options, Names([filename]))
1732    f = ProtoFile(fdesc, file_options)
1733    f.optfilename = optfilename
1734
1735    return f
1736
1737def process_file(filename, fdesc, options, other_files = {}):
1738    '''Process a single file.
1739    filename: The full path to the .proto or .pb source file, as string.
1740    fdesc: The loaded FileDescriptorSet, or None to read from the input file.
1741    options: Command line options as they come from OptionsParser.
1742
1743    Returns a dict:
1744        {'headername': Name of header file,
1745         'headerdata': Data for the .h header file,
1746         'sourcename': Name of the source code file,
1747         'sourcedata': Data for the .c source code file
1748        }
1749    '''
1750    f = parse_file(filename, fdesc, options)
1751
1752    # Provide dependencies if available
1753    for dep in f.fdesc.dependency:
1754        if dep in other_files:
1755            f.add_dependency(other_files[dep])
1756
1757    # Decide the file names
1758    noext = os.path.splitext(filename)[0]
1759    headername = noext + options.extension + options.header_extension
1760    sourcename = noext + options.extension + options.source_extension
1761
1762    if options.strip_path:
1763        headerbasename = os.path.basename(headername)
1764    else:
1765        headerbasename = headername
1766
1767    # List of .proto files that should not be included in the C header file
1768    # even if they are mentioned in the source .proto.
1769    excludes = ['nanopb.proto', 'google/protobuf/descriptor.proto'] + options.exclude
1770    includes = [d for d in f.fdesc.dependency if d not in excludes]
1771
1772    headerdata = ''.join(f.generate_header(includes, headerbasename, options))
1773    sourcedata = ''.join(f.generate_source(headerbasename, options))
1774
1775    # Check if there were any lines in .options that did not match a member
1776    unmatched = [n for n,o in Globals.separate_options if n not in Globals.matched_namemasks]
1777    if unmatched and not options.quiet:
1778        sys.stderr.write("Following patterns in " + f.optfilename + " did not match any fields: "
1779                         + ', '.join(unmatched) + "\n")
1780        if not Globals.verbose_options:
1781            sys.stderr.write("Use  protoc --nanopb-out=-v:.   to see a list of the field names.\n")
1782
1783    return {'headername': headername, 'headerdata': headerdata,
1784            'sourcename': sourcename, 'sourcedata': sourcedata}
1785
1786def main_cli():
1787    '''Main function when invoked directly from the command line.'''
1788
1789    options, filenames = optparser.parse_args()
1790
1791    if not filenames:
1792        optparser.print_help()
1793        sys.exit(1)
1794
1795    if options.quiet:
1796        options.verbose = False
1797
1798    if options.output_dir and not os.path.exists(options.output_dir):
1799        optparser.print_help()
1800        sys.stderr.write("\noutput_dir does not exist: %s\n" % options.output_dir)
1801        sys.exit(1)
1802
1803    if options.verbose:
1804        sys.stderr.write('Google Python protobuf library imported from %s, version %s\n'
1805                         % (google.protobuf.__file__, google.protobuf.__version__))
1806
1807    Globals.verbose_options = options.verbose
1808    for filename in filenames:
1809        results = process_file(filename, None, options)
1810
1811        base_dir = options.output_dir or ''
1812        to_write = [
1813            (os.path.join(base_dir, results['headername']), results['headerdata']),
1814            (os.path.join(base_dir, results['sourcename']), results['sourcedata']),
1815        ]
1816
1817        if not options.quiet:
1818            paths = " and ".join([x[0] for x in to_write])
1819            sys.stderr.write("Writing to %s\n" % paths)
1820
1821        for path, data in to_write:
1822            with open(path, 'w') as f:
1823                f.write(data)
1824
1825def main_plugin():
1826    '''Main function when invoked as a protoc plugin.'''
1827
1828    import io, sys
1829    if sys.platform == "win32":
1830        import os, msvcrt
1831        # Set stdin and stdout to binary mode
1832        msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
1833        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
1834
1835    data = io.open(sys.stdin.fileno(), "rb").read()
1836
1837    request = plugin_pb2.CodeGeneratorRequest.FromString(data)
1838
1839    try:
1840        # Versions of Python prior to 2.7.3 do not support unicode
1841        # input to shlex.split(). Try to convert to str if possible.
1842        params = str(request.parameter)
1843    except UnicodeEncodeError:
1844        params = request.parameter
1845
1846    import shlex
1847    args = shlex.split(params)
1848
1849    if len(args) == 1 and ',' in args[0]:
1850        # For compatibility with other protoc plugins, support options
1851        # separated by comma.
1852        lex = shlex.shlex(params)
1853        lex.whitespace_split = True
1854        lex.whitespace = ','
1855        args = list(lex)
1856
1857    optparser.usage = "Usage: protoc --nanopb_out=[options][,more_options]:outdir file.proto"
1858    optparser.epilog = "Output will be written to file.pb.h and file.pb.c."
1859
1860    if '-h' in args or '--help' in args:
1861        # By default optparser prints help to stdout, which doesn't work for
1862        # protoc plugins.
1863        optparser.print_help(sys.stderr)
1864        sys.exit(1)
1865
1866    options, dummy = optparser.parse_args(args)
1867
1868    Globals.verbose_options = options.verbose
1869
1870    if options.verbose:
1871        sys.stderr.write('Google Python protobuf library imported from %s, version %s\n'
1872                         % (google.protobuf.__file__, google.protobuf.__version__))
1873
1874    response = plugin_pb2.CodeGeneratorResponse()
1875
1876    # Google's protoc does not currently indicate the full path of proto files.
1877    # Instead always add the main file path to the search dirs, that works for
1878    # the common case.
1879    import os.path
1880    options.options_path.append(os.path.dirname(request.file_to_generate[0]))
1881
1882    # Process any include files first, in order to have them
1883    # available as dependencies
1884    other_files = {}
1885    for fdesc in request.proto_file:
1886        other_files[fdesc.name] = parse_file(fdesc.name, fdesc, options)
1887
1888    for filename in request.file_to_generate:
1889        for fdesc in request.proto_file:
1890            if fdesc.name == filename:
1891                results = process_file(filename, fdesc, options, other_files)
1892
1893                f = response.file.add()
1894                f.name = results['headername']
1895                f.content = results['headerdata']
1896
1897                f = response.file.add()
1898                f.name = results['sourcename']
1899                f.content = results['sourcedata']
1900
1901    io.open(sys.stdout.fileno(), "wb").write(response.SerializeToString())
1902
1903if __name__ == '__main__':
1904    # Check if we are running as a plugin under protoc
1905    if 'protoc-gen-' in sys.argv[0] or '--protoc-plugin' in sys.argv:
1906        main_plugin()
1907    else:
1908        main_cli()
1909