// Copyright 2018 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
#pragma allow_unsafe_buffers
#endif

#include "base/debug/test_elf_image_builder.h"

#include <cstring>
#include <string_view>
#include <type_traits>
#include <utility>

#include "base/bits.h"
#include "base/check.h"
#include "base/check_op.h"
#include "base/notreached.h"
#include "build/build_config.h"

#if __SIZEOF_POINTER__ == 4
using Dyn = Elf32_Dyn;
using Nhdr = Elf32_Nhdr;
using Shdr = Elf32_Shdr;
#else
using Dyn = Elf64_Dyn;
using Nhdr = Elf64_Nhdr;
using Shdr = Elf64_Shdr;
#endif
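
// The remaining ELF type aliases used below (Ehdr, Phdr, Half, Word, Addr,
// Off) are assumed to come from test_elf_image_builder.h, selected for the
// target pointer size in the same way as the aliases above.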

namespace base {

namespace {
// Sizes/alignments to use in the ELF image.
static constexpr size_t kPageSize = 4096;
static constexpr size_t kPhdrAlign = 0x4;
static constexpr size_t kNoteAlign = 0x4;
static constexpr size_t kLoadAlign = 0x1000;
static constexpr size_t kDynamicAlign = 0x4;
}  // namespace

struct TestElfImageBuilder::LoadSegment {
  Word flags;
  Word size;
};

TestElfImage::TestElfImage(std::vector<uint8_t> buffer, const void* elf_start)
    : buffer_(std::move(buffer)), elf_start_(elf_start) {}

TestElfImage::~TestElfImage() = default;

TestElfImage::TestElfImage(TestElfImage&&) = default;

TestElfImage& TestElfImage::operator=(TestElfImage&&) = default;

TestElfImageBuilder::TestElfImageBuilder(MappingType mapping_type)
    : mapping_type_(mapping_type) {}
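
// An illustrative sketch of how a test might drive this builder (hypothetical
// values; PF_* and NT_GNU_BUILD_ID are standard <elf.h> constants):
//
//   const uint8_t kBuildId[] = {0xab, 0xcd};
//   TestElfImage image =
//       TestElfImageBuilder(TestElfImageBuilder::RELOCATABLE)
//           .AddLoadSegment(PF_R | PF_X, /*size=*/0x1000)
//           .AddNoteSegment(NT_GNU_BUILD_ID, "GNU", kBuildId)
//           .AddSoName("libtest.so")
//           .Build();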

TestElfImageBuilder::~TestElfImageBuilder() = default;

TestElfImageBuilder& TestElfImageBuilder::AddLoadSegment(Word flags,
                                                         size_t size) {
  load_segments_.push_back({flags, static_cast<Word>(size)});
  return *this;
}
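
// AddNoteSegment encodes the note in the standard ELF note record layout: an
// Nhdr (n_namesz, n_descsz, n_type) followed by the NUL-terminated name and
// then the descriptor bytes, with the name and the descriptor each padded to
// 4-byte alignment.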
TestElfImageBuilder& TestElfImageBuilder::AddNoteSegment(
    Word type,
    std::string_view name,
    span<const uint8_t> desc) {
  const size_t name_with_null_size = name.size() + 1;
  std::vector<uint8_t> buffer(
      sizeof(Nhdr) + bits::AlignUp(name_with_null_size, size_t{4}) +
          bits::AlignUp(desc.size(), size_t{4}),
      '\0');
  uint8_t* loc = &buffer.front();
  Nhdr nhdr;
  nhdr.n_namesz = name_with_null_size;
  nhdr.n_descsz = desc.size();
  nhdr.n_type = type;
  loc = AppendHdr(nhdr, loc);

  memcpy(loc, name.data(), name.size());
  *(loc + name.size()) = '\0';
  loc += bits::AlignUp(name_with_null_size, size_t{4});

  memcpy(loc, &desc.front(), desc.size());
  loc += bits::AlignUp(desc.size(), size_t{4});

  DCHECK_EQ(&buffer.front() + buffer.size(), loc);

  note_contents_.push_back(std::move(buffer));

  return *this;
}

TestElfImageBuilder& TestElfImageBuilder::AddSoName(std::string_view soname) {
  DCHECK(!soname_.has_value());
  soname_.emplace(soname);
  return *this;
}

struct TestElfImageBuilder::ImageMeasures {
  size_t phdrs_required;
  size_t note_start;
  size_t note_size;
  std::vector<size_t> load_segment_start;
  size_t dynamic_start;
  size_t strtab_start;
  size_t total_size;
};

Addr TestElfImageBuilder::GetVirtualAddressForOffset(
    Off offset,
    const uint8_t* elf_start) const {
  switch (mapping_type_) {
    case RELOCATABLE:
      return static_cast<Addr>(offset);

    case RELOCATABLE_WITH_BIAS:
      return static_cast<Addr>(offset + kLoadBias);

    case NON_RELOCATABLE:
      return reinterpret_cast<Addr>(elf_start + offset);
  }
}
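
// Computes where each piece of the image will live, in image order: ELF
// header, program header table, notes, load segments, dynamic segment, and
// finally the string table. Build() lays the contents out in the same order,
// and checks at the end that the two agree on the total size.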
TestElfImageBuilder::ImageMeasures TestElfImageBuilder::MeasureSizesAndOffsets()
    const {
  ImageMeasures measures;

  measures.phdrs_required = 1 + load_segments_.size();
  if (!note_contents_.empty()) {
    ++measures.phdrs_required;
  }
  if (soname_.has_value()) {
    ++measures.phdrs_required;
  }

  // The current offset into the image, where the next bytes are to be written.
  // Starts after the ELF header.
  size_t offset = sizeof(Ehdr);

  // Add space for the program header table.
  offset = bits::AlignUp(offset, kPhdrAlign);
  offset += sizeof(Phdr) * measures.phdrs_required;

  // Add space for the notes.
  measures.note_start = offset;
  if (!note_contents_.empty()) {
    offset = bits::AlignUp(offset, kNoteAlign);
  }
  for (const std::vector<uint8_t>& contents : note_contents_) {
    offset += contents.size();
  }
  measures.note_size = offset - measures.note_start;

  // Add space for the load segments.
  for (auto it = load_segments_.begin(); it != load_segments_.end(); ++it) {
    // The first non PT_PHDR program header is expected to be a PT_LOAD and
    // start at the already-aligned start of the ELF header.
    if (it == load_segments_.begin()) {
      measures.load_segment_start.push_back(0);
    } else {
      offset = bits::AlignUp(offset, kLoadAlign);
      measures.load_segment_start.push_back(offset);
    }
    offset += it->size;
  }

  // Add space for the dynamic segment.
  measures.dynamic_start = bits::AlignUp(offset, kDynamicAlign);
  offset += sizeof(Dyn) * (soname_ ? 2 : 1);
  measures.strtab_start = offset;

  // Add space for the string table.
  ++offset;  // The first string table byte holds a null character.
  if (soname_) {
    offset += soname_->size() + 1;
  }

  measures.total_size = offset;

  return measures;
}

TestElfImage TestElfImageBuilder::Build() {
  ImageMeasures measures = MeasureSizesAndOffsets();

  // Write the ELF contents into |buffer|. Extends the buffer back to the 0
  // address in the case of load bias, so that the memory between the 0 address
  // and the image start is zero-initialized.
  const size_t load_bias =
      mapping_type_ == RELOCATABLE_WITH_BIAS ? kLoadBias : 0;
  std::vector<uint8_t> buffer(load_bias + (kPageSize - 1) + measures.total_size,
                              '\0');
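  // kPageSize - 1 extra bytes were allocated above so that `elf_start` can be
  // aligned up to a page boundary while still leaving `measures.total_size`
  // bytes of room for the image contents.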
  uint8_t* const elf_start =
      bits::AlignUp(&buffer.front() + load_bias, kPageSize);
  uint8_t* loc = elf_start;

  // Add the ELF header.
  loc = AppendHdr(CreateEhdr(measures.phdrs_required), loc);

  // Add the program header table.
  loc = bits::AlignUp(loc, kPhdrAlign);
  loc = AppendHdr(
      CreatePhdr(PT_PHDR, PF_R, kPhdrAlign, loc - elf_start,
                 GetVirtualAddressForOffset(loc - elf_start, elf_start),
                 sizeof(Phdr) * measures.phdrs_required),
      loc);
  for (size_t i = 0; i < load_segments_.size(); ++i) {
    const LoadSegment& load_segment = load_segments_[i];
    size_t size = load_segment.size;
    // The first non PT_PHDR program header is expected to be a PT_LOAD and
    // encompass all the preceding headers.
    if (i == 0) {
      size += loc - elf_start;
    }
    loc = AppendHdr(CreatePhdr(PT_LOAD, load_segment.flags, kLoadAlign,
                               measures.load_segment_start[i],
                               GetVirtualAddressForOffset(
                                   measures.load_segment_start[i], elf_start),
                               size),
                    loc);
  }
  if (measures.note_size != 0) {
    loc = AppendHdr(
        CreatePhdr(PT_NOTE, PF_R, kNoteAlign, measures.note_start,
                   GetVirtualAddressForOffset(measures.note_start, elf_start),
                   measures.note_size),
        loc);
  }
  if (soname_) {
    loc = AppendHdr(
        CreatePhdr(
            PT_DYNAMIC, PF_R | PF_W, kDynamicAlign, measures.dynamic_start,
            GetVirtualAddressForOffset(measures.dynamic_start, elf_start),
            sizeof(Dyn) * 2),
        loc);
  }

  // Add the notes.
  loc = bits::AlignUp(loc, kNoteAlign);
  for (const std::vector<uint8_t>& contents : note_contents_) {
    memcpy(loc, &contents.front(), contents.size());
    loc += contents.size();
  }

  // Add the load segments.
  for (auto it = load_segments_.begin(); it != load_segments_.end(); ++it) {
    if (it != load_segments_.begin()) {
      loc = bits::AlignUp(loc, kLoadAlign);
    }
    memset(loc, 0, it->size);
    loc += it->size;
  }

  loc = bits::AlignUp(loc, kDynamicAlign);

  // Add the soname state.
  if (soname_) {
    // Add a DYNAMIC section for the soname.
    Dyn soname_dyn;
    soname_dyn.d_tag = DT_SONAME;
    soname_dyn.d_un.d_val = 1;  // One char into the string table.
    loc = AppendHdr(soname_dyn, loc);
  }

  Dyn strtab_dyn;
  strtab_dyn.d_tag = DT_STRTAB;
#if BUILDFLAG(IS_FUCHSIA) || BUILDFLAG(IS_ANDROID) || \
    (defined(ARCH_CPU_RISCV_FAMILY) && BUILDFLAG(IS_LINUX))
  // Fuchsia, Android, and Linux on RISC-V do not alter the symtab pointer on
  // ELF load -- it's expected to remain a 'virtual address'.
  strtab_dyn.d_un.d_ptr =
      GetVirtualAddressForOffset(measures.strtab_start, elf_start);
#else
  // Linux relocates this value on ELF load, so produce the pointer value after
  // relocation. That value will always be equal to the actual memory address.
  strtab_dyn.d_un.d_ptr =
      reinterpret_cast<uintptr_t>(elf_start + measures.strtab_start);
#endif
  loc = AppendHdr(strtab_dyn, loc);

  // Add a string table with one entry for the soname, if necessary.
  *loc++ = '\0';  // The first byte holds a null character.
  if (soname_) {
    memcpy(loc, soname_->data(), soname_->size());
    *(loc + soname_->size()) = '\0';
    loc += soname_->size() + 1;
  }

  // The offset past the end of the contents should be consistent with the size
  // measurement above.
  DCHECK_EQ(loc, elf_start + measures.total_size);

  return TestElfImage(std::move(buffer), elf_start);
}

// static
template <typename T>
uint8_t* TestElfImageBuilder::AppendHdr(const T& hdr, uint8_t* loc) {
  static_assert(std::is_trivially_copyable_v<T>, "T should be a plain struct");
  memcpy(loc, &hdr, sizeof(T));
  return loc + sizeof(T);
}

Ehdr TestElfImageBuilder::CreateEhdr(Half phnum) {
  Ehdr ehdr;
  ehdr.e_ident[EI_MAG0] = ELFMAG0;
  ehdr.e_ident[EI_MAG1] = ELFMAG1;
  ehdr.e_ident[EI_MAG2] = ELFMAG2;
  ehdr.e_ident[EI_MAG3] = ELFMAG3;
  ehdr.e_ident[EI_CLASS] = __SIZEOF_POINTER__ == 4 ? 1 : 2;
  ehdr.e_ident[EI_DATA] = 1;  // Little endian.
  ehdr.e_ident[EI_VERSION] = 1;
  ehdr.e_ident[EI_OSABI] = 0x00;
  ehdr.e_ident[EI_ABIVERSION] = 0;
  ehdr.e_ident[EI_PAD] = 0;
  ehdr.e_type = ET_DYN;
  ehdr.e_machine = 0x28;  // ARM.
  ehdr.e_version = 1;
  ehdr.e_entry = 0;
  ehdr.e_phoff = sizeof(Ehdr);
  ehdr.e_shoff = 0;
  ehdr.e_flags = 0;
  ehdr.e_ehsize = sizeof(Ehdr);
  ehdr.e_phentsize = sizeof(Phdr);
  ehdr.e_phnum = phnum;
  ehdr.e_shentsize = sizeof(Shdr);
  ehdr.e_shnum = 0;
  ehdr.e_shstrndx = 0;

  return ehdr;
}

Phdr TestElfImageBuilder::CreatePhdr(Word type,
                                     Word flags,
                                     size_t align,
                                     Off offset,
                                     Addr vaddr,
                                     size_t size) {
  Phdr phdr;
  phdr.p_type = type;
  phdr.p_flags = flags;
  phdr.p_offset = offset;
  phdr.p_filesz = size;
  phdr.p_vaddr = vaddr;
  phdr.p_paddr = 0;
  phdr.p_memsz = phdr.p_filesz;
  phdr.p_align = align;

  return phdr;
}

}  // namespace base