Mirror of https://github.com/espressif/esp-idf (synced 2025-03-09 17:19:09 -04:00)
refactor: move ldgen into a separate package
parent 79cc650d75
commit a44953ecd4
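The diff below moves the ldgen helper modules from tools/ldgen/*.py into a tools/ldgen/ldgen/ package, so callers switch from top-level module imports to package-qualified ones. As orientation only, a minimal sketch based on the import lines in this diff (it assumes it is run from the tools/ldgen directory of the repository):

    # Old layout: the modules sat next to ldgen.py and were imported directly:
    #   from entity import EntityDB
    #   from fragments import FragmentFile
    # New layout: the same modules live inside the ldgen/ package:
    from ldgen.entity import EntityDB
    from ldgen.fragments import FragmentFile
    from ldgen.generation import Generation
    from ldgen.sdkconfig import SDKConfig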
@@ -3033,8 +3033,6 @@ tools/ldgen/output_commands.py
tools/ldgen/samples/template.ld
tools/ldgen/sdkconfig.py
tools/ldgen/test/data/linker_script.ld
tools/ldgen/test/test_entity.py
tools/ldgen/test/test_output_commands.py
tools/mass_mfg/mfg_gen.py
tools/mkdfu.py
tools/mkuf2.py
@@ -233,13 +233,14 @@ tools/kconfig_new/test/confgen/test_confgen.py
tools/kconfig_new/test/confserver/test_confserver.py
tools/kconfig_new/test/gen_kconfig_doc/test_kconfig_out.py
tools/kconfig_new/test/gen_kconfig_doc/test_target_visibility.py
tools/ldgen/fragments.py
tools/ldgen/generation.py
tools/ldgen/ldgen.py
tools/ldgen/ldgen_common.py
tools/ldgen/linker_script.py
tools/ldgen/output_commands.py
tools/ldgen/sdkconfig.py
tools/ldgen/ldgen/entity.py
tools/ldgen/ldgen/fragments.py
tools/ldgen/ldgen/generation.py
tools/ldgen/ldgen/ldgen_common.py
tools/ldgen/ldgen/linker_script.py
tools/ldgen/ldgen/output_commands.py
tools/ldgen/ldgen/sdkconfig.py
tools/ldgen/test/test_entity.py
tools/ldgen/test/test_fragments.py
tools/ldgen/test/test_generation.py
@@ -13,13 +13,13 @@ import sys
import tempfile
from io import StringIO

from entity import EntityDB
from fragments import FragmentFile
from generation import Generation
from ldgen_common import LdGenFailure
from linker_script import LinkerScript
from ldgen.entity import EntityDB
from ldgen.fragments import FragmentFile
from ldgen.generation import Generation
from ldgen.ldgen_common import LdGenFailure
from ldgen.linker_script import LinkerScript
from ldgen.sdkconfig import SDKConfig
from pyparsing import ParseException, ParseFatalException
from sdkconfig import SDKConfig

try:
import confgen
@@ -1,17 +1,6 @@
#
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#

import collections
@@ -25,7 +14,7 @@ from pyparsing import (Group, Literal, OneOrMore, ParseException, SkipTo, Suppre


@total_ordering
class Entity():
class Entity:
"""
An entity refers to a library, object, symbol whose input
sections can be placed or excluded from placement.
@@ -60,7 +49,7 @@ class Entity():
else:
raise ValueError("Invalid arguments '(%s, %s, %s)'" % (archive, obj, symbol))

self.archive = archive
self.archive = archive
self.obj = obj
self.symbol = symbol

@@ -127,7 +116,8 @@ class EntityDB():
archive_path = (Literal('In archive').suppress() +
White().suppress() +
# trim the colon and line ending characters from archive_path
restOfLine.setResultsName('archive_path').setParseAction(lambda s, loc, toks: s.rstrip(':\n\r ')))
restOfLine.setResultsName('archive_path').setParseAction(
lambda s, loc, toks: s.rstrip(':\n\r ')))
parser = archive_path

results = None
@@ -154,11 +144,10 @@ class EntityDB():
section_entry = Suppress(Word(nums)) + SkipTo(' ') + Suppress(restOfLine) + \
Suppress(ZeroOrMore(Word(alphas) + Literal(',')) + Word(alphas))

content = Group(object_line + section_start + section_header + Group(OneOrMore(section_entry)).setResultsName('sections'))
content = Group(
object_line + section_start + section_header + Group(OneOrMore(section_entry)).setResultsName('sections'))
parser = Group(ZeroOrMore(content)).setResultsName('contents')

results = None

try:
results = parser.parseString(info.content, parseAll=True)
except ParseException as p:
@@ -192,7 +181,8 @@ class EntityDB():

def _match_obj(self, archive, obj):
objs = self.get_objects(archive)
match_objs = fnmatch.filter(objs, obj + '.o') + fnmatch.filter(objs, obj + '.*.obj') + fnmatch.filter(objs, obj + '.obj')
match_objs = fnmatch.filter(objs, obj + '.o') + fnmatch.filter(objs, obj + '.*.obj') + fnmatch.filter(objs,
obj + '.obj')

if len(match_objs) > 1:
raise ValueError("Multiple matches for object: '%s: %s': %s" % (archive, obj, str(match_objs)))
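The _match_obj change above is only a line wrap, but the lookup it reformats is easy to miss. A small self-contained sketch of the same fnmatch-based matching, using a hypothetical archive member list (not taken from the commit):

    import fnmatch

    def match_obj(objs, obj):
        # Accept the three object-name conventions ldgen matches against:
        # obj.o, obj.*.obj and obj.obj.
        matches = (fnmatch.filter(objs, obj + '.o')
                   + fnmatch.filter(objs, obj + '.*.obj')
                   + fnmatch.filter(objs, obj + '.obj'))
        if len(matches) > 1:
            raise ValueError("Multiple matches for object: '%s': %s" % (obj, matches))
        return matches

    # Hypothetical archive contents, for illustration only.
    print(match_obj(['heap_caps.c.obj', 'heap_caps_init.c.obj'], 'heap_caps'))
    # -> ['heap_caps.c.obj']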
@@ -8,11 +8,12 @@ import re
from collections import namedtuple
from enum import Enum

from entity import Entity
from pyparsing import (Combine, Forward, Group, Keyword, Literal, OneOrMore, Optional, Or, ParseFatalException,
Suppress, Word, ZeroOrMore, alphanums, alphas, delimitedList, indentedBlock, nums,
originalTextFor, restOfLine)
from sdkconfig import SDKConfig

from .entity import Entity
from .sdkconfig import SDKConfig


class FragmentFile():
@@ -70,38 +71,6 @@ class FragmentFile():
for tok in toks:
expand_conditionals(tok, stmts)

def key_body_parsed(pstr, loc, toks):
stmts = list()
expand_conditionals(toks, stmts)

if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
(parse_ctx.key_grammar.min, parse_ctx.key))

if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
(parse_ctx.key_grammar.max, parse_ctx.key))

try:
parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
except Exception as e:
raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, str(e)))
return None

key = Word(alphanums + '_') + Suppress(':')
key_stmt = Forward()

condition_block = indentedBlock(key_stmt, indent_stack)
key_stmts = OneOrMore(condition_block)
key_body = Suppress(key) + key_stmts
key_body.setParseAction(key_body_parsed)

condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName('condition')
if_condition = Group(Suppress('if') + condition + Suppress(':') + condition_block)
elif_condition = Group(Suppress('elif') + condition + Suppress(':') + condition_block)
else_condition = Group(Suppress('else') + Suppress(':') + condition_block)
conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName('conditional')

def key_parse_action(pstr, loc, toks):
key = toks[0]

@@ -123,11 +92,42 @@ class FragmentFile():

return None

def key_body_parsed(pstr, loc, toks):
stmts = list()
expand_conditionals(toks, stmts)

if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
(parse_ctx.key_grammar.min, parse_ctx.key))

if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
(parse_ctx.key_grammar.max, parse_ctx.key))

try:
parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
except Exception as e:
raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, str(e)))
return None

key = (Word(alphanums + '_') + Suppress(':')).setParseAction(key_parse_action)
key_stmt = Forward()

condition_block = indentedBlock(key_stmt, indent_stack)
key_stmts = OneOrMore(condition_block)
key_body = Suppress(key) + key_stmts
key_body.setParseAction(key_body_parsed)

condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName('condition')
if_condition = Group(Suppress('if') + condition + Suppress(':') + condition_block)
elif_condition = Group(Suppress('elif') + condition + Suppress(':') + condition_block)
else_condition = Group(Suppress('else') + Suppress(':') + condition_block)
conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName(
'conditional')

def name_parse_action(pstr, loc, toks):
parse_ctx.fragment.name = toks[0]

key.setParseAction(key_parse_action)

ftype = Word(alphas).setParseAction(fragment_type_parse_action)
fid = Suppress(':') + Word(alphanums + '_.').setResultsName('name')
fid.setParseAction(name_parse_action)
@@ -135,7 +135,7 @@ class FragmentFile():

def fragment_parse_action(pstr, loc, toks):
key_grammars = parse_ctx.fragment.get_key_grammars()
required_keys = set([k for (k,v) in key_grammars.items() if v.required])
required_keys = set([k for (k, v) in key_grammars.items() if v.required])
present_keys = required_keys.intersection(set(parse_ctx.keys))
if present_keys != required_keys:
raise ParseFatalException(pstr, loc, 'required keys %s for fragment not found' %
@@ -155,7 +155,8 @@ class FragmentFile():
fragment.setParseAction(fragment_parse_action)
fragment.ignore('#' + restOfLine)

deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName('value')
deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName(
'value')

fragment_stmt << (Group(deprecated_mapping) | Group(fragment) | Group(fragment_conditional))

@@ -164,8 +165,7 @@ class FragmentFile():
expand_conditionals(toks, stmts)
return stmts

parser = ZeroOrMore(fragment_stmt)
parser.setParseAction(fragment_stmt_parsed)
parser = ZeroOrMore(fragment_stmt).setParseAction(fragment_stmt_parsed)

self.fragments = parser.parseFile(fragment_file, parseAll=True)

@@ -173,7 +173,7 @@ class FragmentFile():
fragment.path = path


class Fragment():
class Fragment:
"""
Base class for a fragment that can be parsed from a fragment file. All fragments
share the common grammar:
@@ -242,6 +242,7 @@ class Sections(Fragment):
Utility function that returns a list of sections given a sections fragment entry,
with the '+' notation and symbol concatenation handled automatically.
"""

@staticmethod
def get_section_data_from_entry(sections_entry, symbol=None):
if not symbol:
@@ -504,7 +505,8 @@ class Mapping(Fragment):
Optional(Suppress(';') + delimitedList(section_target_flags).setResultsName('sections_target_flags')))

grammars = {
'archive': Fragment.KeyValue(Or([Fragment.ENTITY, Word(Entity.ALL)]).setResultsName('archive'), 1, 1, True),
'archive': Fragment.KeyValue(Or([Fragment.ENTITY, Word(Entity.ALL)]).setResultsName('archive'), 1, 1,
True),
'entries': Fragment.KeyValue(entry, 0, None, True)
}

@@ -570,12 +572,13 @@ class DeprecatedMapping():
fragment.entries = set()
condition_true = False
for entries in toks[0].entries[0]:
condition = next(iter(entries.condition.asList())).strip()
condition = next(iter(entries.condition.asList())).strip()
condition_val = sdkconfig.evaluate_expression(condition)

if condition_val:
for entry in entries[1]:
fragment.entries.add((entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme))
fragment.entries.add(
(entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme))
condition_true = True
break

@@ -591,7 +594,7 @@ class DeprecatedMapping():
fragment.entries.add(('*', None, 'default'))

dep_warning = str(ParseFatalException(pstr, loc,
'Warning: Deprecated old-style mapping fragment parsed in file %s.' % fragment_file))
'Warning: Deprecated old-style mapping fragment parsed in file %s.' % fragment_file))

print(dep_warning)
return fragment
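The fragment-file hunks above mostly re-wrap long pyparsing expressions. For readers unfamiliar with the pattern, a tiny self-contained sketch of how setResultsName and setParseAction are combined in the same style as the fragment grammar (the toy "key: value" grammar and names here are illustrative, not the actual fragment grammar):

    from pyparsing import Group, Suppress, Word, alphanums, restOfLine

    seen = []

    def key_parse_action(pstr, loc, toks):
        # Fires as soon as the key token is matched, like key_parse_action above.
        seen.append(toks[0])

    key = Word(alphanums + '_').setResultsName('key').setParseAction(key_parse_action)
    value = restOfLine.setResultsName('value')
    entry = Group(key + Suppress(':') + value)

    result = entry.parseString('entries: obj (noflash)')
    print(result[0].key, '->', result[0].value.strip())  # entries -> obj (noflash)
    print(seen)  # ['entries']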
@@ -8,13 +8,13 @@ import fnmatch
import itertools
from collections import namedtuple

from entity import Entity
from fragments import Mapping, Scheme, Sections
from ldgen_common import LdGenFailure
from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
from .entity import Entity
from .fragments import Mapping, Scheme, Sections
from .ldgen_common import LdGenFailure
from .output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress


class Placement():
class Placement:
"""
A Placement is an assignment of an entity's input sections to a target
in the output linker script - a precursor to the input section description.
@@ -134,12 +134,12 @@ class EntityNode():

def add_child(self, entity):
child_specificity = self.entity.specificity.value + 1
assert(child_specificity <= Entity.Specificity.SYMBOL.value)
assert (child_specificity <= Entity.Specificity.SYMBOL.value)
name = entity[Entity.Specificity(child_specificity)]
assert(name and name != Entity.ALL)
assert (name and name != Entity.ALL)

child = [c for c in self.children if c.name == name]
assert(len(child) <= 1)
assert (len(child) <= 1)

if not child:
child = self.child_t(self, name)
@@ -174,7 +174,7 @@ class EntityNode():
for sections in self.get_output_sections():
placement = self.placements[sections]
if placement.is_significant():
assert(placement.node == self)
assert (placement.node == self)

keep = False
sort = None
@@ -202,7 +202,8 @@ class EntityNode():
placement_sections = frozenset(placement.sections)
command_sections = sections if sections == placement_sections else placement_sections

command = InputSectionDesc(placement.node.entity, command_sections, [e.node.entity for e in placement.exclusions], keep, sort)
command = InputSectionDesc(placement.node.entity, command_sections,
[e.node.entity for e in placement.exclusions], keep, sort)
commands[placement.target].append(command)

# Generate commands for intermediate, non-explicit exclusion placements here, so that they can be enclosed by
@@ -248,6 +249,7 @@ class SymbolNode(EntityNode):
Entities at depth=3. Represents entities with archive, object
and symbol specified.
"""

def __init__(self, parent, name):
EntityNode.__init__(self, parent, name)
self.entity = Entity(self.parent.parent.name, self.parent.name)
@@ -270,6 +272,7 @@ class ObjectNode(EntityNode):
An intermediate placement on this node is created, if one does not exist,
and is the one excluded from its basis placement.
"""

def __init__(self, parent, name):
EntityNode.__init__(self, parent, name)
self.child_t = SymbolNode
@@ -334,6 +337,7 @@ class ArchiveNode(EntityNode):
"""
Entities at depth=1. Represents entities with archive specified.
"""

def __init__(self, parent, name):
EntityNode.__init__(self, parent, name)
self.child_t = ObjectNode
@@ -345,6 +349,7 @@ class RootNode(EntityNode):
Single entity at depth=0. Represents entities with no specific members
specified.
"""

def __init__(self):
EntityNode.__init__(self, None, Entity.ALL)
self.child_t = ArchiveNode
@@ -433,9 +438,9 @@ class Generation:
entity = Entity(archive, obj, symbol)

# Check the entity exists
if (self.check_mappings and
entity.specificity.value > Entity.Specificity.ARCHIVE.value and
mapping.name not in self.check_mapping_exceptions):
if (self.check_mappings
and entity.specificity.value > Entity.Specificity.ARCHIVE.value
and mapping.name not in self.check_mapping_exceptions):
if not entities.check_exists(entity):
message = "'%s' not found" % str(entity)
raise GenerationException(message, mapping)
@@ -445,9 +450,8 @@ class Generation:
# Check if all section->target defined in the current
# scheme.
for (s, t, f) in flags:
if (t not in scheme_dictionary[scheme_name].keys() or
s not in [_s.name for _s in scheme_dictionary[scheme_name][t]]):

if (t not in scheme_dictionary[scheme_name].keys()
or s not in [_s.name for _s in scheme_dictionary[scheme_name][t]]):
message = "%s->%s not defined in scheme '%s'" % (s, t, scheme_name)
raise GenerationException(message, mapping)
else:
@@ -517,8 +521,6 @@ class Generation:

def add_fragments_from_file(self, fragment_file):
for fragment in fragment_file.fragments:
dict_to_append_to = None

if isinstance(fragment, Mapping) and fragment.deprecated and fragment.name in self.mappings.keys():
self.mappings[fragment.name].entries |= fragment.entries
else:
@@ -533,7 +535,8 @@ class Generation:
if fragment.name in dict_to_append_to.keys():
stored = dict_to_append_to[fragment.name].path
new = fragment.path
message = "Duplicate definition of fragment '%s' found in %s and %s." % (fragment.name, stored, new)
message = "Duplicate definition of fragment '%s' found in %s and %s." % (
fragment.name, stored, new)
raise GenerationException(message)

dict_to_append_to[fragment.name] = fragment
tools/ldgen/ldgen/ldgen_common.py (new file, 13 lines)
@@ -0,0 +1,13 @@
#
# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#


class LdGenFailure(RuntimeError):
"""
Parent class for any ldgen runtime failure which is due to input data
"""

def __init__(self, message):
super(LdGenFailure, self).__init__(message)
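The new tools/ldgen/ldgen/ldgen_common.py above carries the same LdGenFailure class as the deleted top-level file further down. A hedged sketch of the typical call-site pattern for such a class (the wording and structure of the actual handling in ldgen.py may differ):

    import sys

    from ldgen.ldgen_common import LdGenFailure

    def generate_or_die(run):
        # Treat LdGenFailure as an expected, user-facing error: print a short
        # message and exit non-zero instead of dumping a traceback.
        try:
            run()
        except LdGenFailure as e:
            print('linker script generation failed: %s' % e, file=sys.stderr)
            sys.exit(1)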
@@ -6,10 +6,11 @@
import collections
import os

from fragments import Fragment
from generation import GenerationException
from pyparsing import ParseException, Suppress, White

from .fragments import Fragment
from .generation import GenerationException


class LinkerScript:
"""
@@ -1,20 +1,9 @@
#
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#

from entity import Entity
from .entity import Entity

# Contains classes for output section commands referred to in
# https://www.acrc.bris.ac.uk/acrc/RedHat/rhel-ld-en-4/sections.html#OUTPUT-SECTION-DESCRIPTION.
@@ -76,7 +65,7 @@ class InputSectionDesc():
"""

def __init__(self, entity, sections, exclusions=None, keep=False, sort=None):
assert(entity.specificity != Entity.Specificity.SYMBOL)
assert (entity.specificity != Entity.Specificity.SYMBOL)

self.entity = entity
self.sections = set(sections)
@@ -84,8 +73,8 @@
self.exclusions = set()

if exclusions:
assert(not [e for e in exclusions if e.specificity == Entity.Specificity.SYMBOL or
e.specificity == Entity.Specificity.NONE])
assert (not [e for e in exclusions if e.specificity == Entity.Specificity.SYMBOL or
e.specificity == Entity.Specificity.NONE])
self.exclusions = set(exclusions)
else:
self.exclusions = set()
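The asserts above guard the InputSectionDesc constructor: the described entity may not be symbol-specific, and exclusions may not be symbol-specific or unspecified. A hedged usage sketch under those rules (archive and object names are illustrative; it assumes the ldgen package is importable, e.g. when run from tools/ldgen):

    from ldgen.entity import Entity
    from ldgen.output_commands import InputSectionDesc

    SECTIONS = ['.text', '.text.*', '.literal', '.literal.*']

    # Place the .text/.literal input sections of every object in an archive,
    # excluding one object that is mapped elsewhere.
    archive = Entity('libfreertos.a')
    excluded = Entity('libfreertos.a', 'timers')
    desc = InputSectionDesc(archive, SECTIONS, exclusions=[excluded])
    assert excluded in desc.exclusions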
@@ -1,17 +1,6 @@
#
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#

import kconfiglib
@@ -28,8 +17,8 @@ class SDKConfig:
# A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
IDENTIFIER = Word(alphanums.upper() + '_')

HEX = Combine('0x' + Word(hexnums)).setParseAction(lambda t:int(t[0], 16))
DECIMAL = Combine(Optional(Literal('+') | Literal('-')) + Word(nums)).setParseAction(lambda t:int(t[0]))
HEX = Combine('0x' + Word(hexnums)).setParseAction(lambda t: int(t[0], 16))
DECIMAL = Combine(Optional(Literal('+') | Literal('-')) + Word(nums)).setParseAction(lambda t: int(t[0]))
LITERAL = Word(printables.replace(':', ''))
QUOTED_LITERAL = quotedString.setParseAction(removeQuotes)

@@ -66,8 +55,8 @@ class SDKConfig:
condition = Group(Optional('(').suppress() + test + Optional(')').suppress())

grammar = infixNotation(condition, [
('!', 1, opAssoc.RIGHT),
('&&', 2, opAssoc.LEFT),
('||', 2, opAssoc.LEFT)])
('!', 1, opAssoc.RIGHT),
('&&', 2, opAssoc.LEFT),
('||', 2, opAssoc.LEFT)])

return grammar
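The SDKConfig hunks above only touch lambda spacing and operator-table indentation, but the infixNotation grammar they sit in is the core of condition evaluation. A self-contained sketch of the same operator table with simplified operands (bare identifiers instead of the full comparison grammar above):

    from pyparsing import Group, Word, alphanums, infixNotation, opAssoc

    # Simplified operand: an uppercase CONFIG-style identifier.
    identifier = Word(alphanums.upper() + '_')
    condition = Group(identifier)

    # Same precedence table as in the hunk above: !, then &&, then ||.
    grammar = infixNotation(condition, [
        ('!', 1, opAssoc.RIGHT),
        ('&&', 2, opAssoc.LEFT),
        ('||', 2, opAssoc.LEFT)])

    print(grammar.parseString('CONFIG_A && !CONFIG_B || CONFIG_C').asList())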
@@ -1,23 +0,0 @@
#
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


class LdGenFailure(RuntimeError):
"""
Parent class for any ldgen runtime failure which is due to input data
"""
def __init__(self, message):
super(LdGenFailure, self).__init__(message)
@@ -1,29 +1,18 @@
#!/usr/bin/env python
# coding=utf-8
#
# Copyright 2018-2020 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# SPDX-FileCopyrightText: 2018-2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#

import sys
import unittest

try:
from entity import Entity, EntityDB
from ldgen.entity import Entity, EntityDB
except ImportError:
sys.path.append('../')
from entity import Entity, EntityDB
from ldgen.entity import Entity, EntityDB


class EntityTest(unittest.TestCase):
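The try/except ImportError blocks in the updated tests keep them runnable both when the ldgen package is already importable and when the test file is executed directly from tools/ldgen/test/ (the sys.path.append('../') adds the directory that contains the ldgen/ package). A minimal sketch of the same pattern; the test class here is illustrative:

    import sys
    import unittest

    try:
        # Works when the package is already on the import path.
        from ldgen.entity import Entity, EntityDB
    except ImportError:
        # Fall back to running straight from tools/ldgen/test/.
        sys.path.append('../')
        from ldgen.entity import Entity, EntityDB

    class ImportSmokeTest(unittest.TestCase):
        def test_symbols_exist(self):
            self.assertTrue(callable(Entity))
            self.assertTrue(callable(EntityDB))

    if __name__ == '__main__':
        unittest.main()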
@@ -13,12 +13,12 @@ from io import StringIO
from pyparsing import ParseException, ParseFatalException, Word, alphanums

try:
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
from sdkconfig import SDKConfig
from ldgen.fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
from ldgen.sdkconfig import SDKConfig
except ImportError:
sys.path.append('../')
from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
from sdkconfig import SDKConfig
from ldgen.fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
from ldgen.sdkconfig import SDKConfig


class SampleFragment(Fragment):
@@ -12,18 +12,18 @@ import tempfile
import unittest

try:
from generation import Generation, GenerationException
from ldgen.generation import Generation, GenerationException
except ImportError:
sys.path.append('../')
from generation import Generation, GenerationException
from ldgen.generation import Generation, GenerationException

from io import StringIO

from entity import Entity, EntityDB
from fragments import FragmentFile
from linker_script import LinkerScript
from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
from sdkconfig import SDKConfig
from ldgen.entity import Entity, EntityDB
from ldgen.fragments import FragmentFile
from ldgen.linker_script import LinkerScript
from ldgen.output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
from ldgen.sdkconfig import SDKConfig

ROOT = Entity('*')
@@ -1,30 +1,20 @@
#!/usr/bin/env python
#
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#

import sys
import unittest

try:
from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
from ldgen.entity import Entity
from ldgen.output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
except ImportError:
sys.path.append('../')
from output_commands import InputSectionDesc, SymbolAtAddress, AlignAtAddress
from ldgen.entity import Entity
from ldgen.output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress

from entity import Entity

SECTIONS = ['.text', '.text.*', '.literal', '.literal.*']