Skip to content

Commit 1262da8

Browse files
authored
Wrap assembly comments consistently (#2430)
Some lines were much longer than others. They are now consistently wrapped at 70 columns. Why so short — why not 80 or 90? Because we sometimes need to browse these files during class, where we don’t have a lot of horizontal space when using large fonts.
1 parent 2256525 commit 1262da8

File tree

3 files changed: +65 −47 lines changed

src/bare-metal/aps/examples/entry.S

Lines changed: 25 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -34,18 +34,22 @@
3434
.set .L_TCR_TG0_4KB, 0x0 << 14
3535
/* 4 KiB granule size for TTBR1_EL1. */
3636
.set .L_TCR_TG1_4KB, 0x2 << 30
37-
/* Disable translation table walk for TTBR1_EL1, generating a translation fault instead. */
37+
38+
/*
39+
* Disable translation table walk for TTBR1_EL1, generating a
40+
* translation fault instead.
41+
*/
3842
.set .L_TCR_EPD1, 0x1 << 23
3943
/* Translation table walks for TTBR0_EL1 are inner sharable. */
4044
.set .L_TCR_SH_INNER, 0x3 << 12
4145
/*
42-
* Translation table walks for TTBR0_EL1 are outer write-back read-allocate write-allocate
43-
* cacheable.
46+
* Translation table walks for TTBR0_EL1 are outer write-back
47+
* read-allocate write-allocate cacheable.
4448
*/
4549
.set .L_TCR_RGN_OWB, 0x1 << 10
4650
/*
47-
* Translation table walks for TTBR0_EL1 are inner write-back read-allocate write-allocate
48-
* cacheable.
51+
* Translation table walks for TTBR0_EL1 are inner write-back
52+
* read-allocate write-allocate cacheable.
4953
*/
5054
.set .L_TCR_RGN_IWB, 0x1 << 8
5155
/* Size offset for TTBR0_EL1 is 2**39 bytes (512 GiB). */
@@ -61,7 +65,9 @@
6165
.set .L_SCTLR_ELx_C, 0x1 << 2
6266
/* EL0 and EL1 stage 1 MMU enabled. */
6367
.set .L_SCTLR_ELx_M, 0x1 << 0
64-
/* Privileged Access Never is unchanged on taking an exception to EL1. */
68+
/*
69+
* Privileged Access Never is unchanged on taking an exception to EL1.
70+
*/
6571
.set .L_SCTLR_EL1_SPAN, 0x1 << 23
6672
/* SETEND instruction disabled at EL0 in aarch32 mode. */
6773
.set .L_SCTLR_EL1_SED, 0x1 << 8
@@ -72,18 +78,20 @@
7278
.set .Lsctlrval, .Lsctlrval | .L_SCTLR_ELx_I | .L_SCTLR_EL1_SPAN | .L_SCTLR_EL1_RES1
7379

7480
/**
75-
* This is a generic entry point for an image. It carries out the operations required to prepare the
76-
* loaded image to be run. Specifically, it zeroes the bss section using registers x25 and above,
77-
* prepares the stack, enables floating point, and sets up the exception vector. It preserves x0-x3
78-
* for the Rust entry point, as these may contain boot parameters.
81+
* This is a generic entry point for an image. It carries out the
82+
* operations required to prepare the loaded image to be run.
83+
* Specifically, it zeroes the bss section using registers x25 and
84+
* above, prepares the stack, enables floating point, and sets up the
85+
* exception vector. It preserves x0-x3 for the Rust entry point, as
86+
* these may contain boot parameters.
7987
*/
8088
// ANCHOR: entry
8189
.section .init.entry, "ax"
8290
.global entry
8391
entry:
8492
/*
85-
* Load and apply the memory management configuration, ready to enable MMU and
86-
* caches.
93+
* Load and apply the memory management configuration, ready to
94+
* enable MMU and caches.
8795
*/
8896
adrp x30, idmap
8997
msr ttbr0_el1, x30
@@ -101,8 +109,9 @@ entry:
101109
mov_i x30, .Lsctlrval
102110

103111
/*
104-
* Ensure everything before this point has completed, then invalidate any
105-
* potentially stale local TLB entries before they start being used.
112+
* Ensure everything before this point has completed, then
113+
* invalidate any potentially stale local TLB entries before they
114+
* start being used.
106115
*/
107116
isb
108117
tlbi vmalle1
@@ -111,8 +120,8 @@ entry:
111120
isb
112121

113122
/*
114-
* Configure sctlr_el1 to enable MMU and cache and don't proceed until this
115-
* has completed.
123+
* Configure sctlr_el1 to enable MMU and cache and don't proceed
124+
* until this has completed.
116125
*/
117126
msr sctlr_el1, x30
118127
isb

src/bare-metal/aps/examples/exceptions.S

Lines changed: 32 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -15,12 +15,13 @@
1515
*/
1616

1717
/**
18-
* Saves the volatile registers onto the stack. This currently takes 14
19-
* instructions, so it can be used in exception handlers with 18 instructions
20-
* left.
18+
* Saves the volatile registers onto the stack. This currently takes
19+
* 14 instructions, so it can be used in exception handlers with 18
20+
* instructions left.
2121
*
22-
* On return, x0 and x1 are initialised to elr_el2 and spsr_el2 respectively,
23-
* which can be used as the first and second arguments of a subsequent call.
22+
* On return, x0 and x1 are initialised to elr_el2 and spsr_el2
23+
* respectively, which can be used as the first and second arguments
24+
* of a subsequent call.
2425
*/
2526
.macro save_volatile_to_stack
2627
/* Reserve stack space and save registers x0-x18, x29 & x30. */
@@ -37,19 +38,19 @@
3738
stp x29, x30, [sp, #8 * 20]
3839

3940
/*
40-
* Save elr_el1 & spsr_el1. This such that we can take nested exception
41-
* and still be able to unwind.
41+
* Save elr_el1 & spsr_el1. This such that we can take nested
42+
* exception and still be able to unwind.
4243
*/
4344
mrs x0, elr_el1
4445
mrs x1, spsr_el1
4546
stp x0, x1, [sp, #8 * 22]
4647
.endm
4748

4849
/**
49-
* Restores the volatile registers from the stack. This currently takes 14
50-
* instructions, so it can be used in exception handlers while still leaving 18
51-
* instructions left; if paired with save_volatile_to_stack, there are 4
52-
* instructions to spare.
50+
* Restores the volatile registers from the stack. This currently
51+
* takes 14 instructions, so it can be used in exception handlers
52+
* while still leaving 18 instructions left; if paired with
53+
* save_volatile_to_stack, there are 4 instructions to spare.
5354
*/
5455
.macro restore_volatile_from_stack
5556
/* Restore registers x2-x18, x29 & x30. */
@@ -64,7 +65,9 @@
6465
ldr x18, [sp, #8 * 18]
6566
ldp x29, x30, [sp, #8 * 20]
6667

67-
/* Restore registers elr_el1 & spsr_el1, using x0 & x1 as scratch. */
68+
/*
69+
* Restore registers elr_el1 & spsr_el1, using x0 & x1 as scratch.
70+
*/
6871
ldp x0, x1, [sp, #8 * 22]
6972
msr elr_el1, x0
7073
msr spsr_el1, x1
@@ -74,13 +77,16 @@
7477
.endm
7578

7679
/**
77-
* This is a generic handler for exceptions taken at the current EL while using
78-
* SP0. It behaves similarly to the SPx case by first switching to SPx, doing
79-
* the work, then switching back to SP0 before returning.
80+
* This is a generic handler for exceptions taken at the current EL
81+
* while using SP0. It behaves similarly to the SPx case by first
82+
* switching to SPx, doing the work, then switching back to SP0 before
83+
* returning.
84+
*
85+
* Switching to SPx and calling the Rust handler takes 16
86+
* instructions. To restore and return we need an additional 16
87+
* instructions, so we can implement the whole handler within the
88+
* allotted 32 instructions.
8089
*
81-
* Switching to SPx and calling the Rust handler takes 16 instructions. To
82-
* restore and return we need an additional 16 instructions, so we can implement
83-
* the whole handler within the allotted 32 instructions.
8490
*/
8591
.macro current_exception_sp0 handler:req
8692
msr spsel, #1
@@ -92,16 +98,16 @@
9298
.endm
9399

94100
/**
95-
* This is a generic handler for exceptions taken at the current EL while using
96-
* SPx. It saves volatile registers, calls the Rust handler, restores volatile
97-
* registers, then returns.
101+
* This is a generic handler for exceptions taken at the current EL
102+
* while using SPx. It saves volatile registers, calls the Rust
103+
* handler, restores volatile registers, then returns.
98104
*
99-
* This also works for exceptions taken from EL0, if we don't care about
100-
* non-volatile registers.
105+
* This also works for exceptions taken from EL0, if we don't care
106+
* about non-volatile registers.
101107
*
102-
* Saving state and jumping to the Rust handler takes 15 instructions, and
103-
* restoring and returning also takes 15 instructions, so we can fit the whole
104-
* handler in 30 instructions, under the limit of 32.
108+
* Saving state and jumping to the Rust handler takes 15 instructions,
109+
* and restoring and returning also takes 15 instructions, so we can
110+
* fit the whole handler in 30 instructions, under the limit of 32.
105111
*/
106112
.macro current_exception_spx handler:req
107113
save_volatile_to_stack

src/bare-metal/aps/examples/image.ld

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@
1515
*/
1616

1717
/*
18-
* Code will start running at this symbol which is placed at the start of the
19-
* image.
18+
* Code will start running at this symbol which is placed at the start
19+
* of the image.
2020
*/
2121
ENTRY(entry)
2222

@@ -53,8 +53,8 @@ SECTIONS
5353
rodata_end = .;
5454

5555
/*
56-
* Collect together the read-write data including .bss at the end which
57-
* will be zero'd by the entry code.
56+
* Collect together the read-write data including .bss at the end
57+
* which will be zero'd by the entry code.
5858
*/
5959
.data : ALIGN(4096) {
6060
data_begin = .;
@@ -67,7 +67,10 @@ SECTIONS
6767
data_end = .;
6868
} >image
6969

70-
/* Everything beyond this point will not be included in the binary. */
70+
/*
71+
* Everything beyond this point will not be included in the
72+
* binary.
73+
*/
7174
bin_end = .;
7275

7376
/* The entry point code assumes that .bss is 16-byte aligned. */

0 commit comments

Comments (0)