# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module related to code analysis and generation."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from ctypes import util
import itertools
import os
from clang import cindex

# pylint: disable=unused-import
from typing import (Text, List, Optional, Set, Dict, Callable, IO, Generator as
                    Gen, Tuple, Union, Sequence)
# pylint: enable=unused-import

_PARSE_OPTIONS = (
    cindex.TranslationUnit.PARSE_SKIP_FUNCTION_BODIES
    | cindex.TranslationUnit.PARSE_INCOMPLETE |
    # for include directives
    cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)


def _init_libclang():
  """Finds and initializes the libclang library."""
  if cindex.Config.loaded:
    return
  # Try to find libclang in the standard location and a few versioned paths
  # that are used on Debian (and others). If LD_LIBRARY_PATH is set, it is
  # used as well.
  for lib in [
      'clang', 'clang-9', 'clang-8', 'clang-7', 'clang-6.0', 'clang-5.0',
      'clang-4.0'
  ]:
    libclang = util.find_library(lib)
    if libclang:
      cindex.Config.set_library_file(libclang)
      break


def get_header_guard(path):
  # type: (Text) -> Text
  """Generates a header guard string from a path."""
  # The output file will most likely be somewhere in genfiles; strip that
  # prefix. Also strip a trailing '.gen' if this is a step before clang-format.
  if not path:
    raise ValueError('Cannot prepare header guard from path: {}'.format(path))
  if 'genfiles/' in path:
    path = path.split('genfiles/')[1]
  if path.endswith('.gen'):
    path = path.split('.gen')[0]
  path = path.upper().replace('.', '_').replace('-', '_').replace('/', '_')
  return path + '_'
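
# A hedged illustration of the transformations above (the paths are made up):
#   get_header_guard('bazel-genfiles/sapi/foo_sapi.gen.h') -> 'SAPI_FOO_SAPI_GEN_H_'
#     ('genfiles/' prefix stripped; '.gen' is not a suffix here, so it stays)
#   get_header_guard('sapi/foo_sapi.gen') -> 'SAPI_FOO_SAPI_'
#     (trailing '.gen' stripped before the upper-case/underscore rewrite)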


def _stringify_tokens(tokens, separator='\n'):
  # type: (Sequence[cindex.Token], Text) -> Text
  """Converts tokens to text respecting line position (disrespecting column)."""
  previous = OutputLine(0, [])  # not used in output
  lines = []  # type: List[OutputLine]

  for _, group in itertools.groupby(tokens, lambda t: t.location.line):
    group_list = list(group)
    line = OutputLine(previous.next_tab, group_list)

    lines.append(line)
    previous = line

  return separator.join(str(l) for l in lines)
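
# A hedged sketch of what this produces: for the tokens of a declaration that
# spans three source lines, e.g.
#   struct point {
#     int x;
#   };
# the result is "struct point {\n\tint x ;\n} ;". The spacing is rough (a
# space before each ';'), which is fine because the generated code is meant
# to be run through clang-format afterwards.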


TYPE_MAPPING = {
    cindex.TypeKind.VOID: '::sapi::v::Void',
    cindex.TypeKind.CHAR_S: '::sapi::v::Char',
    cindex.TypeKind.CHAR_U: '::sapi::v::UChar',
    cindex.TypeKind.INT: '::sapi::v::Int',
    cindex.TypeKind.UINT: '::sapi::v::UInt',
    cindex.TypeKind.LONG: '::sapi::v::Long',
    cindex.TypeKind.ULONG: '::sapi::v::ULong',
    cindex.TypeKind.UCHAR: '::sapi::v::UChar',
    cindex.TypeKind.USHORT: '::sapi::v::UShort',
    cindex.TypeKind.SHORT: '::sapi::v::Short',
    cindex.TypeKind.LONGLONG: '::sapi::v::LLong',
    cindex.TypeKind.ULONGLONG: '::sapi::v::ULLong',
    cindex.TypeKind.FLOAT: '::sapi::v::Reg<float>',
    cindex.TypeKind.DOUBLE: '::sapi::v::Reg<double>',
    cindex.TypeKind.LONGDOUBLE: '::sapi::v::Reg<long double>',
    cindex.TypeKind.SCHAR: '::sapi::v::SChar',
    cindex.TypeKind.BOOL: '::sapi::v::Bool',
}


class Type(object):
  """Class representing a type.

  Wraps cindex.Type of the argument/return value and provides helpers for the
  code generation.
  """

  def __init__(self, tu, clang_type):
    # type: (_TranslationUnit, cindex.Type) -> None
    self._clang_type = clang_type
    self._tu = tu

  # pylint: disable=protected-access
  def __eq__(self, other):
    # type: (Type) -> bool
    # Use get_usr() to deduplicate Type objects based on declaration
    decl = self._get_declaration()
    decl_o = other._get_declaration()

    return decl.get_usr() == decl_o.get_usr()

  def __ne__(self, other):
    # type: (Type) -> bool
    return not self.__eq__(other)

  def __lt__(self, other):
    # type: (Type) -> bool
    """Compares two Types belonging to the same TranslationUnit.

    This is used to properly order types before emitting them to the generated
    file. To be more specific: a structure definition that contains a field
    whose type is a typedef should end up after that typedef definition. This
    is achieved by exploiting the order in which clang iterates over the AST
    in the translation unit.

    Args:
      other: other comparison type

    Returns:
      true if this Type occurs earlier in the AST than 'other'
    """
    self._validate_tu(other)
    return (self._tu.order[self._get_declaration().hash] <
            self._tu.order[other._get_declaration().hash])  # pylint: disable=protected-access

  def __gt__(self, other):
    # type: (Type) -> bool
    """Compares two Types belonging to the same TranslationUnit.

    This is used to properly order types before emitting them to the generated
    file. To be more specific: a structure definition that contains a field
    whose type is a typedef should end up after that typedef definition. This
    is achieved by exploiting the order in which clang iterates over the AST
    in the translation unit.

    Args:
      other: other comparison type

    Returns:
      true if this Type occurs later in the AST than 'other'
    """
    self._validate_tu(other)
    return (self._tu.order[self._get_declaration().hash] >
            self._tu.order[other._get_declaration().hash])  # pylint: disable=protected-access

  def __hash__(self):
    """Types with the same declaration should hash to the same value."""
    return hash(self._get_declaration().get_usr())

  def _validate_tu(self, other):
    # type: (Type) -> None
    if self._tu != other._tu:  # pylint: disable=protected-access
      raise ValueError('Cannot compare types from different translation units.')

  def is_void(self):
    # type: () -> bool
    return self._clang_type.kind == cindex.TypeKind.VOID

  def is_typedef(self):
    # type: () -> bool
    return self._clang_type.kind == cindex.TypeKind.TYPEDEF

  # Hack: both class and struct types are indistinguishable except for
  # declaration cursor kind
  def is_elaborated(self):  # class, struct, union
    # type: () -> bool
    return (self._clang_type.kind == cindex.TypeKind.ELABORATED or
            self._clang_type.kind == cindex.TypeKind.RECORD)

  def is_struct(self):
    # type: () -> bool
    return (self.is_elaborated() and
            self._get_declaration().kind == cindex.CursorKind.STRUCT_DECL)

  def is_class(self):
    # type: () -> bool
    return (self.is_elaborated() and
            self._get_declaration().kind == cindex.CursorKind.CLASS_DECL)

  def is_function(self):
    # type: () -> bool
    return self._clang_type.kind == cindex.TypeKind.FUNCTIONPROTO

  def is_ptr(self):
    # type: () -> bool
    if self.is_typedef():
      return self._clang_type.get_canonical().kind == cindex.TypeKind.POINTER
    return self._clang_type.kind == cindex.TypeKind.POINTER

  def is_enum(self):
    # type: () -> bool
    return self._clang_type.kind == cindex.TypeKind.ENUM

  def is_const_array(self):
    # type: () -> bool
    return self._clang_type.kind == cindex.TypeKind.CONSTANTARRAY

  def is_simple_type(self):
    # type: () -> bool
    return self._clang_type.kind in TYPE_MAPPING

  def get_pointee(self):
    # type: () -> Type
    return Type(self._tu, self._clang_type.get_pointee())

  def _get_declaration(self):
    # type: () -> cindex.Cursor
    decl = self._clang_type.get_declaration()
    if decl.kind == cindex.CursorKind.NO_DECL_FOUND and self.is_ptr():
      decl = self.get_pointee()._get_declaration()  # pylint: disable=protected-access

    return decl

  def get_related_types(self, result=None, skip_self=False):
    # type: (Optional[Set[Type]], bool) -> Set[Type]
    """Returns all types related to this one, e.g. typedefs, nested structs."""
    if result is None:
      result = set()

    if self in result or self.is_simple_type() or self.is_class():
      return result

    if self.is_const_array():
      t = Type(self._tu, self._clang_type.get_array_element_type())
      return t.get_related_types(result)

    if self.is_typedef():
      return self._get_related_types_of_typedef(result)

    if self._clang_type.kind in (cindex.TypeKind.POINTER,
                                 cindex.TypeKind.MEMBERPOINTER,
                                 cindex.TypeKind.LVALUEREFERENCE,
                                 cindex.TypeKind.RVALUEREFERENCE):
      return self.get_pointee().get_related_types(result, skip_self)

    if self.is_elaborated():  # union + struct; classes should be filtered out
      return self._get_related_types_of_elaborated(result, skip_self)

    if self.is_function():
      return self._get_related_types_of_function(result)

    if self.is_enum():
      if not skip_self:
        result.add(self)
        self._tu.search_for_macro_name(self._get_declaration())

      return result

    raise ValueError('Unhandled kind: {}'.format(self._clang_type.kind))

  def _get_related_types_of_typedef(self, result):
    # type: (Set[Type]) -> Set[Type]
    """Returns all intermediate types related to the typedef."""
    result.add(self)
    decl = self._get_declaration()
    t = Type(self._tu, decl.underlying_typedef_type)
    if t.is_ptr():
      t = t.get_pointee()

    self._tu.search_for_macro_name(decl)

    if not t.is_simple_type():
      skip_child = self.contains_declaration(t)
      if t.is_elaborated() and skip_child:
        # if child declaration is contained in parent, we don't have to emit it
        self._tu.types_to_skip.add(t)
      result.update(t.get_related_types(result, skip_child))

    return result

  def _get_related_types_of_elaborated(self, result, skip_self=False):
    # type: (Set[Type], bool) -> Set[Type]
    """Returns all types related to the structure."""
    # Skip unnamed structures, e.g. 'typedef struct {...} x;' -- the
    # 'struct {...}' part will be rendered as part of the typedef rendering.
    if self._get_declaration().spelling and not skip_self:
      self._tu.search_for_macro_name(self._get_declaration())
      result.add(self)

    for f in self._clang_type.get_fields():
      self._tu.search_for_macro_name(f)
      result.update(Type(self._tu, f.type).get_related_types(result))

    return result

  def _get_related_types_of_function(self, result):
    # type: (Set[Type]) -> Set[Type]
    """Returns all types related to the function."""
    for arg in self._clang_type.argument_types():
      result.update(Type(self._tu, arg).get_related_types(result))
    related = Type(self._tu,
                   self._clang_type.get_result()).get_related_types(result)
    result.update(related)

    return result

  def contains_declaration(self, other):
    # type: (Type) -> bool
    """Checks if the declaration of this type contains the other type."""
    self_extent = self._get_declaration().extent
    other_extent = other._get_declaration().extent  # pylint: disable=protected-access

    if other_extent.start.file is None:
      return False
    return (other_extent.start in self_extent and
            other_extent.end in self_extent)

  def stringify(self):
    # type: () -> Text
    """Returns a string representation of the Type."""
    # (szwl): keep it as simple as possible; keep macros on separate lines so
    # nothing breaks -- the output goes through clang-format anyway.
    tokens = [
        x for x in self._get_declaration().get_tokens()
        if x.kind is not cindex.TokenKind.COMMENT
    ]

    return _stringify_tokens(tokens)


class OutputLine(object):
  """Helper class for Type printing."""

  def __init__(self, tab, tokens):
    # type: (int, List[cindex.Token]) -> None
    self.tokens = tokens
    self.spellings = []
    self.define = False
    self.tab = tab
    self.next_tab = tab
    list(map(self._process_token, self.tokens))

  def _process_token(self, t):
    # type: (cindex.Token) -> None
    """Processes a token, updating the internal state related to indentation."""
    if t.spelling == '#':
      self.define = True
    elif t.spelling == '{':
      self.next_tab += 1
    elif t.spelling == '}':
      self.tab -= 1
      self.next_tab -= 1

    is_bracket = t.spelling == '('
    is_macro = len(self.spellings) == 1 and self.spellings[0] == '#'
    if self.spellings and not is_bracket and not is_macro:
      self.spellings.append(' ')
    self.spellings.append(t.spelling)

  def __str__(self):
    # type: () -> Text
    tabs = ('\t' * self.tab) if not self.define else ''
    return tabs + ''.join(t for t in self.spellings)


class ArgumentType(Type):
  """Class representing a function argument type.

  Object fields are being used by the code template:
    pos: argument position
    type: string representation of the type
    argument: string representation of the type as a function argument
    mapped_type: SAPI equivalent of the type
    wrapped: wraps the type in a SAPI object constructor
    call_argument: the type (or its SAPI wrapper) used in the function call
  """

  def __init__(self, function, pos, arg_type, name=None):
    # type: (Function, int, cindex.Type, Optional[Text]) -> None
    super(ArgumentType, self).__init__(function.translation_unit(), arg_type)
    self._function = function

    self.pos = pos
    self.name = name or 'a{}'.format(pos)
    self.type = arg_type.spelling

    template = '{}' if self.is_ptr() else '&{}_'
    self.call_argument = template.format(self.name)

  def __str__(self):
    # type: () -> Text
    """Returns the function argument prepared from the type."""
    if self.is_ptr():
      return '::sapi::v::Ptr* {}'.format(self.name)

    return '{} {}'.format(self._clang_type.spelling, self.name)

  @property
  def wrapped(self):
    # type: () -> Text
    return '{} {name}_(({name}))'.format(self.mapped_type, name=self.name)
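
  # A hedged example of how these pieces fit together: for a non-pointer 'int'
  # parameter named 'a0' (a made-up name following the default 'a{pos}'
  # scheme), mapped_type is '::sapi::v::Int', wrapped expands to
  # "::sapi::v::Int a0_((a0))", and call_argument is "&a0_", so the generated
  # body declares the SAPI wrapper and passes its address to Sandbox::Call.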

  @property
  def mapped_type(self):
    # type: () -> Text
    """Maps the type to its SAPI equivalent."""
    if self.is_ptr():
      # TODO(szwl): const ptrs do not play well with SAPI C++ API...
      spelling = self._clang_type.spelling.replace('const', '')
      return '::sapi::v::Reg<{}>'.format(spelling)

    type_ = self._clang_type

    if type_.kind == cindex.TypeKind.TYPEDEF:
      type_ = self._clang_type.get_canonical()
    if type_.kind == cindex.TypeKind.ELABORATED:
      type_ = type_.get_canonical()
    if type_.kind == cindex.TypeKind.ENUM:
      return '::sapi::v::IntBase<{}>'.format(self._clang_type.spelling)
    if type_.kind in [
        cindex.TypeKind.CONSTANTARRAY, cindex.TypeKind.INCOMPLETEARRAY
    ]:
      return '::sapi::v::Reg<{}>'.format(self._clang_type.spelling)

    if type_.kind == cindex.TypeKind.LVALUEREFERENCE:
      return 'LVALUEREFERENCE::NOT_SUPPORTED'

    if type_.kind == cindex.TypeKind.RVALUEREFERENCE:
      return 'RVALUEREFERENCE::NOT_SUPPORTED'

    if type_.kind in [cindex.TypeKind.RECORD, cindex.TypeKind.ELABORATED]:
      raise ValueError('Elaborated type (e.g. struct) in mapped_type is not '
                       'supported: function {}, arg {}, type {}, location {}'
                       ''.format(self._function.name, self.pos,
                                 self._clang_type.spelling,
                                 self._function.cursor.location))

    if type_.kind not in TYPE_MAPPING:
      raise KeyError('Key {} does not exist in TYPE_MAPPING.'
                     ' function {}, arg {}, type {}, location {}'
                     ''.format(type_.kind, self._function.name, self.pos,
                               self._clang_type.spelling,
                               self._function.cursor.location))

    return TYPE_MAPPING[type_.kind]


class ReturnType(ArgumentType):
  """Class representing a function return type.

  Attributes:
    return_type: absl::StatusOr<T> where T is the original return type, or
      absl::Status for functions returning void
  """

  def __init__(self, function, arg_type):
    # type: (Function, cindex.Type) -> None
    super(ReturnType, self).__init__(function, 0, arg_type, None)

  def __str__(self):
    # type: () -> Text
    """Returns the function return type prepared from the type."""
    # TODO(szwl): const ptrs do not play well with SAPI C++ API...
    spelling = self._clang_type.spelling.replace('const', '')
    return_type = 'absl::StatusOr<{}>'.format(spelling)
    return_type = 'absl::Status' if self.is_void() else return_type
    return return_type
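
  # A hedged illustration: a function returning plain 'int' renders as
  # 'absl::StatusOr<int>', while a 'void' function renders as 'absl::Status',
  # so every generated wrapper can also surface sandbox transport errors.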


class Function(object):
  """Class representing a SAPI-wrapped function used by the template.

  Wraps a Clang cursor object of kind FUNCTION_DECL and provides helpers to
  aid code generation.
  """

  def __init__(self, tu, cursor):
    # type: (_TranslationUnit, cindex.Cursor) -> None
    self._tu = tu
    self.cursor = cursor  # type: cindex.Cursor
    self.name = cursor.spelling  # type: Text
    self.result = ReturnType(self, cursor.result_type)
    self.original_definition = '{} {}'.format(
        cursor.result_type.spelling, self.cursor.displayname)  # type: Text

    types = self.cursor.get_arguments()
    self.argument_types = [
        ArgumentType(self, i, t.type, t.spelling) for i, t in enumerate(types)
    ]
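
    # A hedged example (the C prototype is made up): for 'int add(int a, int b)'
    # original_definition becomes 'int add(int, int)' -- the cursor's
    # displayname carries the parameter types -- and argument_types holds two
    # ArgumentType entries named 'a' and 'b'.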

  def translation_unit(self):
    # type: () -> _TranslationUnit
    return self._tu

  def arguments(self):
    # type: () -> List[ArgumentType]
    return self.argument_types

  def call_arguments(self):
    # type: () -> List[Text]
    return [a.call_argument for a in self.argument_types]

  def get_absolute_path(self):
    # type: () -> Text
    return self.cursor.location.file.name

  def get_include_path(self, prefix):
    # type: (Optional[Text]) -> Text
    """Creates a proper include path."""
    # TODO(szwl): sanity checks
    # TODO(szwl): prefix 'utils/' and the path is '.../fileutils/...' case
    if prefix and not prefix.endswith('/'):
      prefix += '/'

    if not prefix:
      return self.get_absolute_path()
    elif prefix in self.get_absolute_path():
      return prefix + self.get_absolute_path().split(prefix)[-1]
    return prefix + self.get_absolute_path().split('/')[-1]
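
  # A hedged illustration with made-up paths: for a header located at
  # '/usr/include/zlib.h', get_include_path('third_party/zlib') yields
  # 'third_party/zlib/zlib.h' (the prefix is not part of the absolute path, so
  # only the basename is appended); with no prefix the absolute path is
  # returned unchanged.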

  def get_related_types(self, processed=None):
    # type: (Optional[Set[Type]]) -> Set[Type]
    result = self.result.get_related_types(processed)
    for a in self.argument_types:
      result.update(a.get_related_types(processed))

    return result

  def is_mangled(self):
    # type: () -> bool
    return self.cursor.mangled_name != self.cursor.spelling

  def __hash__(self):
    # type: () -> int
    return hash(self.cursor.get_usr())

  def __eq__(self, other):
    # type: (Function) -> bool
    return self.cursor.mangled_name == other.cursor.mangled_name


class _TranslationUnit(object):
  """Class wrapping clang's TranslationUnit. Provides extra utilities."""

  def __init__(self, path, tu, limit_scan_depth=False):
    # type: (Text, cindex.TranslationUnit, bool) -> None
    self.path = path
    self.limit_scan_depth = limit_scan_depth
    self._tu = tu
    self._processed = False
    self.forward_decls = dict()
    self.functions = set()
    self.order = dict()
    self.defines = {}
    self.required_defines = set()
    self.types_to_skip = set()

  def _process(self):
    # type: () -> None
    """Walks the cursor tree and caches some of it for future use."""
    if not self._processed:
      # self.includes[self._tu.spelling] = (0, self._tu.cursor)
      self._processed = True
      # TODO(szwl): duplicates?
      # TODO(szwl): for d in translation_unit.diagnostics:, handle that

      for i, cursor in enumerate(self._walk_preorder()):
        # Workaround for issue #32: ignore all cursors with kinds that are
        # not implemented in the Python bindings.
        try:
          cursor.kind
        except ValueError:
          continue
        # naive way to order types: they should be ordered when walking the tree
        if cursor.kind.is_declaration():
          self.order[cursor.hash] = i

        if (cursor.kind == cindex.CursorKind.MACRO_DEFINITION and
            cursor.location.file):
          self.order[cursor.hash] = i
          self.defines[cursor.spelling] = cursor

        # most likely a forward decl of struct
        if (cursor.kind == cindex.CursorKind.STRUCT_DECL and
            not cursor.is_definition()):
          self.forward_decls[Type(self, cursor.type)] = cursor
        if (cursor.kind == cindex.CursorKind.FUNCTION_DECL and
            cursor.linkage != cindex.LinkageKind.INTERNAL):
          if self.limit_scan_depth:
            if (cursor.location and cursor.location.file.name == self.path):
              self.functions.add(Function(self, cursor))
          else:
            self.functions.add(Function(self, cursor))

  def get_functions(self):
    # type: () -> Set[Function]
    if not self._processed:
      self._process()
    return self.functions

  def _walk_preorder(self):
    # type: () -> Gen
    for c in self._tu.cursor.walk_preorder():
      yield c

  def search_for_macro_name(self, cursor):
    # type: (cindex.Cursor) -> None
    """Searches for possible macro usage in constant array types."""
    tokens = list(t.spelling for t in cursor.get_tokens())
    try:
      for token in tokens:
        if token in self.defines and token not in self.required_defines:
          self.required_defines.add(token)
          self.search_for_macro_name(self.defines[token])
    except ValueError:
      return


class Analyzer(object):
  """Class responsible for analysis."""

  @staticmethod
  def process_files(input_paths, compile_flags, limit_scan_depth=False):
    # type: (List[Text], List[Text], bool) -> List[_TranslationUnit]
    """Processes files with libclang and returns TranslationUnit objects."""
    _init_libclang()

    tus = []
    for path in input_paths:
      tu = Analyzer._analyze_file_for_tu(
          path, compile_flags=compile_flags, limit_scan_depth=limit_scan_depth)
      tus.append(tu)
    return tus
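
  # A hedged end-to-end sketch of how this class and Generator (below) are
  # typically driven; the header path, flags, namespace and exported function
  # name are all made up:
  #
  #   tus = Analyzer.process_files(['foo.h'], compile_flags=['-DFOO=1'],
  #                                limit_scan_depth=True)
  #   header = Generator(tus).generate('Foo', ['foo_init'], namespace='foo',
  #                                    output_file='foo_sapi.sapi.h',
  #                                    embed_dir='.', embed_name='foo')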

  # pylint: disable=line-too-long
  @staticmethod
  def _analyze_file_for_tu(path,
                           compile_flags=None,
                           test_file_existence=True,
                           unsaved_files=None,
                           limit_scan_depth=False):
    # type: (Text, Optional[List[Text]], bool, Optional[Tuple[Text, Union[Text, IO[Text]]]], bool) -> _TranslationUnit
    """Returns a _TranslationUnit object for the given path."""
    compile_flags = compile_flags or []
    if test_file_existence and not os.path.isfile(path):
      raise IOError('Path {} does not exist.'.format(path))

    _init_libclang()
    index = cindex.Index.create()  # type: cindex.Index
    # TODO(szwl): hack until I figure out how python swig does that.
    # Headers will be parsed as C++. C libs usually have
    # '#ifdef __cplusplus extern "C"' for compatibility with C++.
    lang = '-xc++' if not path.endswith('.c') else '-xc'
    args = [lang]
    args += compile_flags
    args.append('-I.')
    return _TranslationUnit(
        path,
        index.parse(
            path,
            args=args,
            unsaved_files=unsaved_files,
            options=_PARSE_OPTIONS),
        limit_scan_depth=limit_scan_depth)


class Generator(object):
  """Class responsible for code generation."""

  AUTO_GENERATED = ('// AUTO-GENERATED by the Sandboxed API generator.\n'
                    '// Edits will be discarded when regenerating this file.\n')

  GUARD_START = ('#ifndef {0}\n' '#define {0}')
  GUARD_END = '#endif  // {}'
  EMBED_INCLUDE = '#include "{}"'
  EMBED_CLASS = ('class {0}Sandbox : public ::sapi::Sandbox {{\n'
                 ' public:\n'
                 '  {0}Sandbox() : ::sapi::Sandbox({1}_embed_create()) {{}}\n'
                 '}};')
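
  # A hedged expansion of EMBED_CLASS with made-up values ('Foo', 'foo_lib'):
  #
  #   class FooSandbox : public ::sapi::Sandbox {
  #    public:
  #     FooSandbox() : ::sapi::Sandbox(foo_lib_embed_create()) {}
  #   };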

  def __init__(self, translation_units):
    # type: (List[_TranslationUnit]) -> None
    """Initializes the generator.

    Args:
      translation_units: list of translation units for the analyzed files
    """
    self.translation_units = translation_units
    self.functions = None
    _init_libclang()

  def generate(self,
               name,
               function_names,
               namespace=None,
               output_file=None,
               embed_dir=None,
               embed_name=None):
    # pylint: disable=line-too-long
    # type: (Text, List[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text]) -> Text
    """Generates structures, functions and typedefs.

    Args:
      name: name of the class that will contain the generated interface
      function_names: list of function names to export to the interface
      namespace: namespace of the interface
      output_file: path to the output file, used to generate the header guard;
        defaults to None, in which case no guard is emitted
      embed_dir: path to the directory with embed includes
      embed_name: name of the embed object

    Returns:
      generated interface as a string
    """
    related_types = self._get_related_types(function_names)
    forward_decls = self._get_forward_decls(related_types)
    functions = self._get_functions(function_names)
    related_types = [(t.stringify() + ';') for t in related_types]
    defines = self._get_defines()

    api = {
        'name': name,
        'functions': functions,
        'related_types': defines + forward_decls + related_types,
        'namespaces': namespace.split('::') if namespace else [],
        'embed_dir': embed_dir,
        'embed_name': embed_name,
        'output_file': output_file
    }
    return self.format_template(**api)

  def _get_functions(self, func_names=None):
    # type: (Optional[List[Text]]) -> List[Function]
    """Gets the Function objects that will be used to generate the interface."""
    if self.functions is not None:
      return self.functions
    self.functions = []
    # TODO(szwl): for d in translation_unit.diagnostics:, handle that
    for translation_unit in self.translation_units:
      self.functions += [
          f for f in translation_unit.get_functions()
          if not func_names or f.name in func_names
      ]
    # Allow only non-mangled functions: C++ overloads are not handled in
    # code generation.
    self.functions = [f for f in self.functions if not f.is_mangled()]

    # remove duplicates
    self.functions = list(set(self.functions))
    self.functions.sort(key=lambda x: x.name)
    return self.functions

  def _get_related_types(self, func_names=None):
    # type: (Optional[List[Text]]) -> List[Type]
    """Gets type definitions related to the chosen functions.

    Types related to one function will land in the same translation unit;
    we gather each function's types, sort them and append them as a sublist
    to the overall types list. This is necessary as we can't compare types
    from two different translation units.

    Args:
      func_names: list of function names to take into consideration; empty
        means all functions.

    Returns:
      list of types in correct (ready to render) order
    """
    processed = set()
    fn_related_types = set()
    types = []
    types_to_skip = set()

    for f in self._get_functions(func_names):
      fn_related_types = f.get_related_types()
      types += sorted(r for r in fn_related_types if r not in processed)
      processed.update(fn_related_types)
      types_to_skip.update(f.translation_unit().types_to_skip)

    return [t for t in types if t not in types_to_skip]

  def _get_defines(self):
    # type: () -> List[Text]
    """Gets #define directives that appeared during TranslationUnit processing.

    Returns:
      list of #define string representations
    """

    def make_sort_condition(translation_unit):
      return lambda cursor: translation_unit.order[cursor.hash]

    result = []
    for tu in self.translation_units:
      tmp_result = []
      sort_condition = make_sort_condition(tu)
      for name in tu.required_defines:
        if name in tu.defines:
          define = tu.defines[name]
          tmp_result.append(define)
      for define in sorted(tmp_result, key=sort_condition):
        result.append('#define ' +
                      _stringify_tokens(define.get_tokens(), separator=' \\\n'))
    return result

  def _get_forward_decls(self, types):
    # type: (List[Type]) -> List[Text]
    """Gets forward declarations of related types, if present."""
    forward_decls = dict()
    result = []
    done = set()
    for tu in self.translation_units:
      forward_decls.update(tu.forward_decls)

    for t in types:
      if t in forward_decls and t not in done:
        result.append(_stringify_tokens(forward_decls[t].get_tokens()) + ';')
        done.add(t)

    return result

  def _format_function(self, f):
    # type: (Function) -> Text
    """Renders one function of the Api.

    Args:
      f: function object with information necessary to emit full function body

    Returns:
      filled function template
    """
    result = []
    result.append('  // {}'.format(f.original_definition))

    arguments = ', '.join(str(a) for a in f.arguments())
    result.append('  {} {}({}) {{'.format(f.result, f.name, arguments))
    result.append('    {} ret;'.format(f.result.mapped_type))

    argument_types = []
    for a in f.argument_types:
      if not a.is_ptr():
        argument_types.append(a.wrapped + ';')
    if argument_types:
      for arg in argument_types:
        result.append('    {}'.format(arg))

    call_arguments = f.call_arguments()
    if call_arguments:  # fake empty space to add ',' before first argument
      call_arguments.insert(0, '')
    result.append('')
    # For OSS, the macro below will be replaced.
    result.append('    SAPI_RETURN_IF_ERROR(sandbox_->Call("{}", &ret{}));'
                  ''.format(f.name, ', '.join(call_arguments)))

    return_status = 'return absl::OkStatus();'
    if f.result and not f.result.is_void():
      if f.result and f.result.is_enum():
        return_status = ('return static_cast<{}>'
                         '(ret.GetValue());').format(f.result.type)
      else:
        return_status = 'return ret.GetValue();'
    result.append('    {}'.format(return_status))
    result.append('  }')

    return '\n'.join(result)

  def format_template(self, name, functions, related_types, namespaces,
                      embed_dir, embed_name, output_file):
    # pylint: disable=line-too-long
    # type: (Text, List[Function], List[Type], List[Text], Text, Text, Text) -> Text
    # pylint: enable=line-too-long
    """Formats arguments into a proper interface header file.

    Args:
      name: name of the Api - 'Test' will yield a TestApi object
      functions: list of functions to generate
      related_types: types used in the above functions
      namespaces: list of namespaces to wrap the Api class with
      embed_dir: directory where the embedded library lives
      embed_name: name of the embedded library
      output_file: interface output path - used in header guard generation

    Returns:
      generated header file text
    """
    result = [Generator.AUTO_GENERATED]

    header_guard = get_header_guard(output_file) if output_file else ''
    if header_guard:
      result.append(Generator.GUARD_START.format(header_guard))

    # Copybara transform results in the paths below.
    result.append('#include "sandboxed_api/sandbox.h"')
    result.append('#include "sandboxed_api/vars.h"')

    if embed_dir and embed_name:
      result.append(
          Generator.EMBED_INCLUDE.format(
              os.path.join(embed_dir, embed_name) + '_embed.h'))

    if namespaces:
      result.append('')
      for n in namespaces:
        result.append('namespace {} {{'.format(n))

    if related_types:
      result.append('')
      for t in related_types:
        result.append(t)

    result.append('')

    if embed_dir and embed_name:
      result.append(
          Generator.EMBED_CLASS.format(name, embed_name.replace('-', '_')))

    result.append('class {}Api {{'.format(name))
    result.append(' public:')
    result.append('  explicit {}Api(::sapi::Sandbox* sandbox)'
                  ' : sandbox_(sandbox) {{}}'.format(name))
    result.append('  // Deprecated')
    result.append('  ::sapi::Sandbox* GetSandbox() const { return sandbox(); }')
    result.append('  ::sapi::Sandbox* sandbox() const { return sandbox_; }')

    for f in functions:
      result.append('')
      result.append(self._format_function(f))

    result.append('')
    result.append(' private:')
    result.append('  ::sapi::Sandbox* sandbox_;')
    result.append('};')
    result.append('')

    if namespaces:
      for n in reversed(namespaces):
        result.append('}}  // namespace {}'.format(n))

    if header_guard:
      result.append(Generator.GUARD_END.format(header_guard))

    result.append('')

    return '\n'.join(result)