diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 3158aa2eeefb..d3849ab39239 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -283,8 +283,7 @@ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
             kwargs_reduced['_allow_no_sources'] = True
             target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, [], objects,
                                  self.environment, self.coredata.compilers[for_machine], kwargs_reduced)
-            target.process_compilers()
-            target.process_compilers_late([])
+            target.process_compilers_late()

             new_target = {
                 'name': target.get_basename(),
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 4972eabad514..d17a9a5fb7c3 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -842,6 +842,8 @@ def object_filename_from_source(self, target: build.BuildTarget, source: 'FileOr
     def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_build_root: str) -> T.List[str]:
         result: T.List[str] = []

+        targetdir = self.get_target_private_dir(extobj.target)
+
         # Merge sources and generated sources
         raw_sources = list(extobj.srclist)
         for gensrc in extobj.genlist:
@@ -858,12 +860,18 @@ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_buil
             elif self.environment.is_object(s):
                 result.append(s.relative_name())

+        # MSVC generates an object file for PCH
+        if extobj.pch:
+            for lang, pch in extobj.target.pch.items():
+                compiler = extobj.target.compilers[lang]
+                if compiler.get_argument_syntax() == 'msvc':
+                    objname = self.get_msvc_pch_objname(lang, pch)
+                    result.append(os.path.join(proj_dir_to_build_root, targetdir, objname))
+
         # extobj could contain only objects and no sources
         if not sources:
             return result

-        targetdir = self.get_target_private_dir(extobj.target)
-
         # With unity builds, sources don't map directly to objects,
         # we only support extracting all the objects in this mode,
         # so just return all object files.
@@ -898,6 +906,12 @@ def get_pch_include_args(self, compiler: 'Compiler', target: build.BuildTarget)
             args += compiler.get_pch_use_args(pchpath, p[0])
         return includeargs + args

+    def get_msvc_pch_objname(self, lang: str, pch: T.List[str]) -> str:
+        if len(pch) == 1:
+            # Same name as in create_msvc_pch_implementation() below.
+            return f'meson_pch-{lang}.obj'
+        return os.path.splitext(pch[1])[0] + '.obj'
+
     def create_msvc_pch_implementation(self, target: build.BuildTarget, lang: str, pch_header: str) -> str:
         # We have to include the language in the file name, otherwise
         # pch.c and pch.cpp will both end up as pch.obj in VS backends.
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index d61c3597c206..8bfe96d1003f 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -3066,7 +3066,7 @@ def generate_pch(self, target, header_deps=None):
                 extradep = None
             pch_objects += objs
             rulename = self.compiler_to_pch_rule_name(compiler)
-            elem = NinjaBuildElement(self.all_outputs, dst, rulename, src)
+            elem = NinjaBuildElement(self.all_outputs, objs + [dst], rulename, src)
             if extradep is not None:
                 elem.add_dep(extradep)
             self.add_header_deps(target, elem, header_deps)
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index fefc28c3667b..97a8b64883aa 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -17,7 +17,6 @@
 from dataclasses import dataclass, field, InitVar
 from functools import lru_cache
 import abc
-import copy
 import hashlib
 import itertools, pathlib
 import os
@@ -413,6 +412,7 @@ class ExtractedObjects(HoldableObject):
     genlist: T.List['GeneratedTypes'] = field(default_factory=list)
     objlist: T.List[T.Union[str, 'File', 'ExtractedObjects']] = field(default_factory=list)
     recursive: bool = True
+    pch: bool = False

     def __post_init__(self) -> None:
         if self.target.is_unity:
@@ -745,6 +745,9 @@ def __init__(
         # 2. Compiled objects created by and extracted from another target
         self.process_objectlist(objects)
         self.process_kwargs(kwargs)
+        self.missing_languages = self.process_compilers()
+        self.link(extract_as_list(kwargs, 'link_with'))
+        self.link_whole(extract_as_list(kwargs, 'link_whole'))
         if not any([self.sources, self.generated, self.objects, self.link_whole_targets, self.structured_sources,
                     kwargs.pop('_allow_no_sources', False)]):
             mlog.warning(f'Build target {name} has no sources. '
@@ -835,14 +838,14 @@ def can_compile_remove_sources(compiler: 'Compiler', sources: T.List['FileOrStri
                     removed = True
             return removed

-    def process_compilers_late(self, extra_languages: T.List[str]):
+    def process_compilers_late(self):
         """Processes additional compilers after kwargs have been evaluated.

        This can add extra compilers that might be required by keyword
        arguments, such as link_with or dependencies. It will also try to guess
        which compiler to use if one hasn't been selected already.
        """
-        for lang in extra_languages:
+        for lang in self.missing_languages:
            self.compilers[lang] = self.all_compilers[lang]

        # did user override clink_langs for this target?
@@ -988,18 +991,6 @@ def process_link_depends(self, sources):
                                      'Link_depends arguments must be strings, Files, '
                                      'or a Custom Target, or lists thereof.')

-    def get_original_kwargs(self):
-        return self.kwargs
-
-    def copy_kwargs(self, kwargs):
-        self.kwargs = copy.copy(kwargs)
-        for k, v in self.kwargs.items():
-            if isinstance(v, list):
-                self.kwargs[k] = listify(v, flatten=True)
-        for t in ['dependencies', 'link_with', 'include_directories', 'sources']:
-            if t in self.kwargs:
-                self.kwargs[t] = listify(self.kwargs[t], flatten=True)
-
     def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedTypes']]) -> ExtractedObjects:
         sources_set = set(self.sources)
         generated_set = set(self.generated)
@@ -1027,7 +1018,7 @@ def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects:
         return ExtractedObjects(self, self.sources, self.generated, self.objects,
-                                recursive)
+                                recursive, pch=True)

     def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
         return self.get_transitive_link_deps()
@@ -1073,23 +1064,9 @@ def get_custom_install_mode(self) -> T.Optional['FileMode']:

     def process_kwargs(self, kwargs):
         self.process_kwargs_base(kwargs)
-        self.copy_kwargs(kwargs)
+        self.original_kwargs = kwargs
         kwargs.get('modules', [])
         self.need_install = kwargs.get('install', self.need_install)
-        llist = extract_as_list(kwargs, 'link_with')
-        for linktarget in llist:
-            if isinstance(linktarget, dependencies.ExternalLibrary):
-                raise MesonException(textwrap.dedent('''\
-                    An external library was used in link_with keyword argument, which
-                    is reserved for libraries built as part of this project. External
-                    libraries must be passed using the dependencies keyword argument
-                    instead, because they are conceptually "external dependencies",
-                    just like those detected with the dependency() function.
-                '''))
-            self.link(linktarget)
-        lwhole = extract_as_list(kwargs, 'link_whole')
-        for linktarget in lwhole:
-            self.link_whole(linktarget)

         for lang in all_languages:
             lang_args = extract_as_list(kwargs, f'{lang}_args')
@@ -1280,17 +1257,36 @@ def get_outputs(self) -> T.List[str]:
     def get_extra_args(self, language):
         return self.extra_args.get(language, [])

-    def get_dependencies(self, exclude=None):
-        transitive_deps = []
-        if exclude is None:
-            exclude = []
+    @lru_cache(maxsize=None)
+    def get_dependencies(self) -> OrderedSet[Target]:
+        # Get all targets needed for linking. This includes all link_with and
+        # link_whole targets, and also all dependencies of static libraries
+        # recursively. The algorithm here is closely related to what we do in
+        # get_internal_static_libraries(): Installed static libraries include
+        # objects from all their dependencies already.
+        result: OrderedSet[Target] = OrderedSet()
         for t in itertools.chain(self.link_targets, self.link_whole_targets):
-            if t in transitive_deps or t in exclude:
+            if t not in result:
+                result.add(t)
+                if isinstance(t, StaticLibrary):
+                    t.get_dependencies_recurse(result)
+        return result
+
+    def get_dependencies_recurse(self, result: OrderedSet[Target], include_internals: bool = True) -> None:
+        # self is always a static library because we don't need to pull dependencies
+        # of shared libraries. If self is installed (not internal) it already
+        # includes objects extracted from all its internal dependencies so we can
+        # skip them.
+        include_internals = include_internals and self.is_internal()
+        for t in self.link_targets:
+            if t in result:
                 continue
-            transitive_deps.append(t)
+            if include_internals or not t.is_internal():
+                result.add(t)
             if isinstance(t, StaticLibrary):
-                transitive_deps += t.get_dependencies(transitive_deps + exclude)
-        return transitive_deps
+                t.get_dependencies_recurse(result, include_internals)
+        for t in self.link_whole_targets:
+            t.get_dependencies_recurse(result, include_internals)

     def get_source_subdir(self):
         return self.subdir
@@ -1328,10 +1324,8 @@ def add_deps(self, deps):
                 self.extra_files.extend(f for f in dep.extra_files if f not in self.extra_files)
                 self.add_include_dirs(dep.include_directories, dep.get_include_type())
                 self.objects.extend(dep.objects)
-                for l in dep.libraries:
-                    self.link(l)
-                for l in dep.whole_libraries:
-                    self.link_whole(l)
+                self.link(dep.libraries)
+                self.link_whole(dep.whole_libraries)
                 if dep.get_compile_args() or dep.get_link_args():
                     # Those parts that are external.
                     extpart = dependencies.InternalDependency('undefined',
@@ -1380,27 +1374,27 @@ def get_external_deps(self) -> T.List[dependencies.Dependency]:
     def is_internal(self) -> bool:
         return False

-    def link(self, target):
-        for t in listify(target):
+    def link(self, targets):
+        for t in targets:
             if isinstance(self, StaticLibrary) and self.need_install:
                 if isinstance(t, (CustomTarget, CustomTargetIndex)):
                     if not t.should_install():
                         mlog.warning(f'Try to link an installed static library target {self.name} with a'
                                      'custom target that is not installed, this might cause problems'
                                      'when you try to use this static library')
-                elif t.is_internal() and not t.uses_rust():
+                elif t.is_internal():
                     # When we're a static library and we link_with to an
                     # internal/convenience library, promote to link_whole.
-                    #
-                    # There are cases we cannot do this, however. In Rust, for
-                    # example, this can't be done with Rust ABI libraries, though
-                    # it could be done with C ABI libraries, though there are
-                    # several meson issues that need to be fixed:
-                    # https://github.com/mesonbuild/meson/issues/10722
-                    # https://github.com/mesonbuild/meson/issues/10723
-                    # https://github.com/mesonbuild/meson/issues/10724
-                    return self.link_whole(t)
+                    return self.link_whole([t])
             if not isinstance(t, (Target, CustomTargetIndex)):
+                if isinstance(t, dependencies.ExternalLibrary):
+                    raise MesonException(textwrap.dedent('''\
+                        An external library was used in link_with keyword argument, which
+                        is reserved for libraries built as part of this project. External
+                        libraries must be passed using the dependencies keyword argument
+                        instead, because they are conceptually "external dependencies",
+                        just like those detected with the dependency() function.
+ ''')) raise InvalidArguments(f'{t!r} is not a target.') if not t.is_linkable_target(): raise InvalidArguments(f"Link target '{t!s}' is not linkable.") @@ -1416,16 +1410,13 @@ def link(self, target): mlog.warning(msg + ' This will fail in cross build.') self.link_targets.append(t) - def link_whole(self, target): - for t in listify(target): + def link_whole(self, targets): + for t in targets: if isinstance(t, (CustomTarget, CustomTargetIndex)): if not t.is_linkable_target(): raise InvalidArguments(f'Custom target {t!r} is not linkable.') if t.links_dynamically(): raise InvalidArguments('Can only link_whole custom targets that are static archives.') - if isinstance(self, StaticLibrary): - # FIXME: We could extract the .a archive to get object files - raise InvalidArguments('Cannot link_whole a custom target into a static library') elif not isinstance(t, StaticLibrary): raise InvalidArguments(f'{t!r} is not a static library.') elif isinstance(self, SharedLibrary) and not t.pic: @@ -1438,18 +1429,41 @@ def link_whole(self, target): raise InvalidArguments(msg + ' This is not possible in a cross build.') else: mlog.warning(msg + ' This will fail in cross build.') - if isinstance(self, StaticLibrary): + if isinstance(self, StaticLibrary) and not self.uses_rust(): + if isinstance(t, (CustomTarget, CustomTargetIndex)) or t.uses_rust(): + # There are cases we cannot do this, however. In Rust, for + # example, this can't be done with Rust ABI libraries, though + # it could be done with C ABI libraries, though there are + # several meson issues that need to be fixed: + # https://github.com/mesonbuild/meson/issues/10722 + # https://github.com/mesonbuild/meson/issues/10723 + # https://github.com/mesonbuild/meson/issues/10724 + # FIXME: We could extract the .a archive to get object files + raise InvalidArguments('Cannot link_whole a custom or Rust target into a static library') # When we're a static library and we link_whole: to another static # library, we need to add that target's objects to ourselves. - self.objects += t.extract_all_objects_recurse() + self.objects += [t.extract_all_objects()] + # If we install this static library we also need to include objects + # from all uninstalled static libraries it depends on. 
+                if self.need_install:
+                    for lib in t.get_internal_static_libraries():
+                        self.objects += [lib.extract_all_objects()]
             self.link_whole_targets.append(t)

-    def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
-        objs = [self.extract_all_objects()]
+    @lru_cache(maxsize=None)
+    def get_internal_static_libraries(self) -> OrderedSet[Target]:
+        result: OrderedSet[Target] = OrderedSet()
+        self.get_internal_static_libraries_recurse(result)
+        return result
+
+    def get_internal_static_libraries_recurse(self, result: OrderedSet[Target]) -> None:
         for t in self.link_targets:
+            if t.is_internal() and t not in result:
+                result.add(t)
+                t.get_internal_static_libraries_recurse(result)
+        for t in self.link_whole_targets:
             if t.is_internal():
-                objs += t.extract_all_objects_recurse()
-        return objs
+                t.get_internal_static_libraries_recurse(result)

     def add_pch(self, language: str, pchlist: T.List[str]) -> None:
         if not pchlist:
@@ -2661,7 +2675,7 @@ def is_internal(self) -> bool:
             return False
         return CustomTargetIndex(self, self.outputs[0]).is_internal()

-    def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
+    def extract_all_objects(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
         return self.get_outputs()

     def type_suffix(self):
@@ -2923,8 +2937,8 @@ def is_internal(self) -> bool:
         suf = os.path.splitext(self.output)[-1]
         return suf in {'.a', '.lib'} and not self.should_install()

-    def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
-        return self.target.extract_all_objects_recurse()
+    def extract_all_objects(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
+        return self.target.extract_all_objects()

     def get_custom_install_dir(self) -> T.List[T.Union[str, Literal[False]]]:
         return self.target.get_custom_install_dir()
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index 76d9829c77bc..acf475a1d36a 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -213,7 +213,7 @@ def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
         return ['/DEF:' + defsfile]

     def gen_pch_args(self, header: str, source: str, pchname: str) -> T.Tuple[str, T.List[str]]:
-        objname = os.path.splitext(pchname)[0] + '.obj'
+        objname = os.path.splitext(source)[0] + '.obj'
         return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]

     def openmp_flags(self) -> T.List[str]:
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index e51910e57d10..44f0c5ad7218 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -3132,9 +3132,8 @@ def add_target(self, name: str, tobj: build.Target) -> None:
             raise InvalidCode(f'Tried to create target "{name}", but a target of that name already exists.')

         if isinstance(tobj, build.BuildTarget):
-            missing_languages = tobj.process_compilers()
-            self.add_languages(missing_languages, True, tobj.for_machine)
-            tobj.process_compilers_late(missing_languages)
+            self.add_languages(tobj.missing_languages, True, tobj.for_machine)
+            tobj.process_compilers_late()
             self.add_stdlib_info(tobj)

         self.build.targets[idname] = tobj
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index bf1fd1ddae02..6052ac4d6260 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -139,7 +139,7 @@ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: Func
         tkwargs['args'] = extra_args + ['--test',
                                         '--format', 'pretty']
         tkwargs['protocol'] = 'rust'
-        new_target_kwargs = base_target.kwargs.copy()
+        new_target_kwargs = base_target.original_kwargs.copy()
         # Don't mutate the shallow copied list, instead replace it with a new
         # one
         new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test']
diff --git a/test cases/rust/5 polyglot static/meson.build b/test cases/rust/5 polyglot static/meson.build
index bed7977abaea..20cef3887e21 100644
--- a/test cases/rust/5 polyglot static/meson.build
+++ b/test cases/rust/5 polyglot static/meson.build
@@ -9,7 +9,18 @@ deps = [
 extra_winlibs = meson.get_compiler('c').get_id() in ['msvc', 'clang-cl'] ? ['userenv.lib', 'ws2_32.lib', 'bcrypt.lib'] : []

 r = static_library('stuff', 'stuff.rs', rust_crate_type : 'staticlib')
-l = static_library('clib', 'clib.c', link_with : r, install : true)
+
+# clib is an installed static library and stuff is not installed. That means that
+# to be usable clib must link_whole stuff. Meson automatically promotes to link_whole,
+# as it would do with C libraries, but then cannot extract objects from stuff and
+# thus should error out.
+# FIXME: We should support this use-case in the future.
+testcase expect_error('Cannot link_whole a custom or Rust target into a static library')
+  l = static_library('clib', 'clib.c', link_with : r, install : true)
+endtestcase
+
+l = static_library('clib', 'clib.c', link_with : r)
+
 e = executable('prog', 'prog.c',
                dependencies: deps,
                link_with : l,
diff --git a/test cases/rust/5 polyglot static/test.json b/test cases/rust/5 polyglot static/test.json
index cc0d2da7eefc..135300de547b 100644
--- a/test cases/rust/5 polyglot static/test.json
+++ b/test cases/rust/5 polyglot static/test.json
@@ -1,7 +1,6 @@
 {
   "installed": [
     {"type": "exe", "file": "usr/bin/prog"},
-    {"type": "pdb", "file": "usr/bin/prog"},
-    {"type": "file", "file": "usr/lib/libclib.a"}
+    {"type": "pdb", "file": "usr/bin/prog"}
   ]
 }
diff --git a/test cases/unit/113 complex link cases/main.c b/test cases/unit/113 complex link cases/main.c
new file mode 100644
index 000000000000..739b413af4c1
--- /dev/null
+++ b/test cases/unit/113 complex link cases/main.c
@@ -0,0 +1,6 @@
+int s3(void);
+
+int main(int argc, char *argv[])
+{
+    return s3();
+}
diff --git a/test cases/unit/113 complex link cases/meson.build b/test cases/unit/113 complex link cases/meson.build
new file mode 100644
index 000000000000..d3387c2ce0de
--- /dev/null
+++ b/test cases/unit/113 complex link cases/meson.build
@@ -0,0 +1,40 @@
+project('complex link cases', 'c')
+
+# In all tests, e1 uses s3 which uses s2 which uses s1.
+
+# Executable links with s3 and s1 but not s2 because it is included in s3.
+s1 = static_library('t1-s1', 's1.c')
+s2 = static_library('t1-s2', 's2.c', link_with: s1)
+s3 = static_library('t1-s3', 's3.c', link_whole: s2)
+e = executable('t1-e1', 'main.c', link_with: s3)
+
+# s3 is installed but not s1 so it has to include s1 too.
+# Executable links only s3 because it contains s1 and s2.
+s1 = static_library('t2-s1', 's1.c')
+s2 = static_library('t2-s2', 's2.c', link_with: s1)
+s3 = static_library('t2-s3', 's3.c', link_whole: s2, install: true)
+e = executable('t2-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 only
+s1 = static_library('t3-s1', 's1.c')
+s2 = static_library('t3-s2', 's2.c', link_with: s1)
+s3 = shared_library('t3-s3', 's3.c', link_with: s2)
+e = executable('t3-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 and s2
+s1 = static_library('t4-s1', 's1.c')
+s2 = shared_library('t4-s2', 's2.c', link_with: s1)
+s3 = static_library('t4-s3', 's3.c', link_with: s2)
+e = executable('t4-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 and s1
+s1 = shared_library('t5-s1', 's1.c')
+s2 = static_library('t5-s2', 's2.c', link_with: s1)
+s3 = static_library('t5-s3', 's3.c', link_with: s2, install: true)
+e = executable('t5-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 and s2
+s1 = static_library('t6-s1', 's1.c')
+s2 = static_library('t6-s2', 's2.c', link_with: s1, install: true)
+s3 = static_library('t6-s3', 's3.c', link_with: s2, install: true)
+e = executable('t6-e1', 'main.c', link_with: s3)
diff --git a/test cases/unit/113 complex link cases/s1.c b/test cases/unit/113 complex link cases/s1.c
new file mode 100644
index 000000000000..68bba4949e62
--- /dev/null
+++ b/test cases/unit/113 complex link cases/s1.c
@@ -0,0 +1,3 @@
+int s1(void) {
+    return 1;
+}
diff --git a/test cases/unit/113 complex link cases/s2.c b/test cases/unit/113 complex link cases/s2.c
new file mode 100644
index 000000000000..835870c0428f
--- /dev/null
+++ b/test cases/unit/113 complex link cases/s2.c
@@ -0,0 +1,5 @@
+int s1(void);
+
+int s2(void) {
+    return s1() + 1;
+}
diff --git a/test cases/unit/113 complex link cases/s3.c b/test cases/unit/113 complex link cases/s3.c
new file mode 100644
index 000000000000..08e0620108b6
--- /dev/null
+++ b/test cases/unit/113 complex link cases/s3.c
@@ -0,0 +1,5 @@
+int s2(void);
+
+int s3(void) {
+    return s2() + 1;
+}
diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py
index d6ff3b0adca7..b003ddd25be4 100644
--- a/unittests/allplatformstests.py
+++ b/unittests/allplatformstests.py
@@ -4376,8 +4376,7 @@ def output_name(name, type_):
                                structured_sources=None,
                                objects=[], environment=env, compilers=env.coredata.compilers[MachineChoice.HOST],
                                kwargs={})
-            target.process_compilers()
-            target.process_compilers_late([])
+            target.process_compilers_late()
             return target.filename

         shared_lib_name = lambda name: output_name(name, SharedLibrary)
diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py
index c94169ad8e37..b1b9c4cb9bbd 100644
--- a/unittests/linuxliketests.py
+++ b/unittests/linuxliketests.py
@@ -1828,3 +1828,20 @@ def test_freezing(self):
         with self.assertRaises(subprocess.CalledProcessError) as e:
             self.run_tests()
         self.assertNotIn('Traceback', e.exception.output)
+
+    @skipUnless(is_linux(), "Ninja file differs on different platforms")
+    def test_complex_link_cases(self):
+        testdir = os.path.join(self.unit_test_dir, '113 complex link cases')
+        self.init(testdir)
+        self.build()
+        with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as f:
+            content = f.read()
+        # Verify link dependencies, see comments in meson.build.
+        self.assertIn('build libt1-s3.a: STATIC_LINKER libt1-s2.a.p/s2.c.o libt1-s3.a.p/s3.c.o\n', content)
+        self.assertIn('build t1-e1: c_LINKER t1-e1.p/main.c.o | libt1-s1.a libt1-s3.a\n', content)
+        self.assertIn('build libt2-s3.a: STATIC_LINKER libt2-s2.a.p/s2.c.o libt2-s1.a.p/s1.c.o libt2-s3.a.p/s3.c.o\n', content)
+        self.assertIn('build t2-e1: c_LINKER t2-e1.p/main.c.o | libt2-s3.a\n', content)
+        self.assertIn('build t3-e1: c_LINKER t3-e1.p/main.c.o | libt3-s3.so.p/libt3-s3.so.symbols\n', content)
+        self.assertIn('build t4-e1: c_LINKER t4-e1.p/main.c.o | libt4-s2.so.p/libt4-s2.so.symbols libt4-s3.a\n', content)
+        self.assertIn('build t5-e1: c_LINKER t5-e1.p/main.c.o | libt5-s1.so.p/libt5-s1.so.symbols libt5-s3.a\n', content)
+        self.assertIn('build t6-e1: c_LINKER t6-e1.p/main.c.o | libt6-s2.a libt6-s3.a\n', content)