MP Implementation 3/4: MAJOR progress on SMP, full userland MP is now working!
[dragonfly.git] / sys / platform / pc32 / i386 / mplock.s
CommitLineData
984263bc
MD
1/*
2 * ----------------------------------------------------------------------------
3 * "THE BEER-WARE LICENSE" (Revision 42):
4 * <phk@FreeBSD.org> wrote this file. As long as you retain this notice you
5 * can do whatever you want with this stuff. If we meet some day, and you think
6 * this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp
7 * ----------------------------------------------------------------------------
8 *
9 * $FreeBSD: src/sys/i386/i386/mplock.s,v 1.29.2.2 2000/05/16 06:58:06 dillon Exp $
a2a5ad0d 10 * $DragonFly: src/sys/platform/pc32/i386/mplock.s,v 1.6 2003/07/10 04:47:53 dillon Exp $
984263bc
MD
11 *
12 * Functions for locking between CPUs in a SMP system.
13 *
14 * This is an "exclusive counting semaphore". This means that it can be
15 * free (0xffffffff) or be owned by a CPU (0xXXYYYYYY where XX is CPU-id
16 * and YYYYYY is the count).
17 *
18 * Contrary to most implementations around, this one is entirely atomic:
19 * The attempt to seize/release the semaphore and the increment/decrement
20 * is done in one atomic operation. This way we are safe from all kinds
21 * of weird reentrancy situations.
22 */
23
24#include <machine/asmacros.h>
25#include <machine/smptests.h> /** GRAB_LOPRIO */
26#include <machine/apic.h>
27
8a8d5d85 28#include "assym.s"
984263bc 29
a2a5ad0d
MD
30/*
31 * YYY Debugging only. Define this to be paranoid about invalidating the
32 * TLB when we get giant.
33 */
34#undef PARANOID_INVLTLB
35
8a8d5d85
MD
36 .data
37 ALIGN_DATA
38#ifdef SMP
39 .globl mp_lock
40mp_lock:
41 .long -1 /* initialized to not held */
42#endif
984263bc
MD
43
44 .text
8a8d5d85
MD
45 SUPERALIGN_TEXT
46
47 /*
48 * Note on cmpxchgl... exchanges ecx with mem if mem matches eax.
96728c05 49 * Z=1 (jz) on success. A lock prefix is required for MP.
8a8d5d85
MD
50 */
51NON_GPROF_ENTRY(cpu_get_initial_mplock)
52 movl PCPU(curthread),%ecx
53 movl $1,TD_MPCOUNT(%ecx) /* curthread has mpcount of 1 */
54 movl $0,mp_lock /* owned by cpu 0 */
55 NON_GPROF_RET
56
57 /*
58 * cpu_try_mplock() returns non-zero on success, 0 on failure. It
59 * only adjusts mp_lock. It does not touch td_mpcount, and it
60 * must be called from inside a critical section.
61 */
62NON_GPROF_ENTRY(cpu_try_mplock)
63 movl PCPU(cpuid),%ecx
64 movl $-1,%eax
96728c05 65 lock cmpxchgl %ecx,mp_lock /* ecx<->mem if eax matches */
8a8d5d85 66 jnz 1f
a2a5ad0d
MD
67#ifdef PARANOID_INVLTLB
68 movl %cr3,%eax; movl %eax,%cr3 /* YYY check and remove */
69#endif
8a8d5d85
MD
70 movl $1,%eax
71 NON_GPROF_RET
984263bc 721:
8a8d5d85
MD
73 movl $0,%eax
74 NON_GPROF_RET
984263bc 75
8a8d5d85
MD
76NON_GPROF_ENTRY(get_mplock)
77 movl PCPU(curthread),%edx
78 cmpl $0,TD_MPCOUNT(%edx)
79 je 1f
80 incl TD_MPCOUNT(%edx) /* already have it, just ++mpcount */
a2a5ad0d
MD
81#ifdef INVARIANTS
82 movl PCPU(cpuid),%eax /* failure */
83 cmpl %eax,mp_lock
84 jne 4f
85#endif
8a8d5d85 86 NON_GPROF_RET
984263bc 871:
8a8d5d85
MD
88 pushfl
89 cli
90 movl $1,TD_MPCOUNT(%edx)
91 movl PCPU(cpuid),%ecx
92 movl $-1,%eax
96728c05 93 lock cmpxchgl %ecx,mp_lock /* ecx<->mem & JZ if eax matches */
984263bc 94 jnz 2f
a2a5ad0d
MD
95#ifdef PARANOID_INVLTLB
96 movl %cr3,%eax; movl %eax,%cr3 /* YYY check and remove */
97#endif
8a8d5d85
MD
98 popfl /* success */
99 NON_GPROF_RET
984263bc 1002:
96728c05 101#ifdef INVARIANTS
8a8d5d85
MD
102 movl PCPU(cpuid),%eax /* failure */
103 cmpl %eax,mp_lock
96728c05
MD
104 je 3f
105#endif
106 addl $TDPRI_CRIT,TD_PRI(%edx)
107 popfl
108 call lwkt_switch /* will be correct on return */
a2a5ad0d
MD
109#ifdef INVARIANTS
110 movl PCPU(cpuid),%eax /* failure */
111 cmpl %eax,mp_lock
112 jne 4f
113#endif
96728c05
MD
114 movl PCPU(curthread),%edx
115 subl $TDPRI_CRIT,TD_PRI(%edx)
116 NON_GPROF_RET
1173:
118 cmpl $0,panicstr /* don't double panic */
8a8d5d85
MD
119 je badmp_get
120 popfl
96728c05 121 NON_GPROF_RET
984263bc 122
a2a5ad0d
MD
1234:
124 cmpl $0,panicstr /* don't double panic */
125 je badmp_get2
126 NON_GPROF_RET
127
8a8d5d85
MD
128NON_GPROF_ENTRY(try_mplock)
129 movl PCPU(curthread),%edx
130 cmpl $0,TD_MPCOUNT(%edx)
131 je 1f
132 incl TD_MPCOUNT(%edx) /* already have it, just ++mpcount */
a2a5ad0d
MD
133#ifdef INVARIANTS
134 movl PCPU(cpuid),%eax /* failure */
135 cmpl %eax,mp_lock
136 jne 4b
137#endif
8a8d5d85
MD
138 movl $1,%eax
139 NON_GPROF_RET
984263bc 1401:
8a8d5d85
MD
141 pushfl
142 cli
143 movl PCPU(cpuid),%ecx
144 movl $-1,%eax
96728c05 145 lock cmpxchgl %ecx,mp_lock /* ecx<->mem & JZ if eax matches */
8a8d5d85
MD
146 jnz 2f
147 movl $1,TD_MPCOUNT(%edx)
a2a5ad0d
MD
148#ifdef PARANOID_INVLTLB
149 movl %cr3,%eax; movl %eax,%cr3 /* YYY check and remove */
150#endif
8a8d5d85
MD
151 popfl /* success */
152 movl $1,%eax
153 NON_GPROF_RET
984263bc 1542:
96728c05
MD
155#ifdef INVARIANTS
156 cmpl $0,panicstr
157 jnz 3f
8a8d5d85
MD
158 movl PCPU(cpuid),%eax /* failure */
159 cmpl %eax,mp_lock
160 je badmp_get
96728c05
MD
1613:
162#endif
984263bc 163 popfl
8a8d5d85
MD
164 movl $0,%eax
165 NON_GPROF_RET
984263bc 166
8a8d5d85
MD
167NON_GPROF_ENTRY(rel_mplock)
168 movl PCPU(curthread),%edx
96728c05
MD
169 movl TD_MPCOUNT(%edx),%eax
170 cmpl $1,%eax
8a8d5d85 171 je 1f
96728c05
MD
172#ifdef INVARIANTS
173 testl %eax,%eax
174 jz badmp_rel
175#endif
176 subl $1,%eax
177 movl %eax,TD_MPCOUNT(%edx)
8a8d5d85
MD
178 NON_GPROF_RET
1791:
984263bc 180 pushfl
984263bc 181 cli
96728c05
MD
182#ifdef INVARIANTS
183 movl PCPU(cpuid),%ecx
184 cmpl %ecx,mp_lock
185 jne badmp_rel2
186#endif
8a8d5d85
MD
187 movl $0,TD_MPCOUNT(%edx)
188 movl $MP_FREE_LOCK,mp_lock
984263bc 189 popfl
8a8d5d85 190 NON_GPROF_RET
984263bc 191
96728c05
MD
192#ifdef INVARIANTS
193
8a8d5d85
MD
194badmp_get:
195 pushl $bmpsw1
196 call panic
a2a5ad0d
MD
197badmp_get2:
198 pushl $bmpsw1a
199 call panic
8a8d5d85
MD
200badmp_rel:
201 pushl $bmpsw2
202 call panic
96728c05
MD
203badmp_rel2:
204 pushl $bmpsw2a
205 call panic
984263bc 206
984263bc 207 .data
984263bc 208
8a8d5d85 209bmpsw1:
96728c05 210 .asciz "try/get_mplock(): already have lock! %d %p"
984263bc 211
a2a5ad0d
MD
212bmpsw1a:
213 .asciz "try/get_mplock(): failed on count or switch %d %p"
214
8a8d5d85 215bmpsw2:
96728c05
MD
216 .asciz "rel_mplock(): mpcount already 0 @ %p %p %p %p %p %p %p %p!"
217
218bmpsw2a:
219 .asciz "rel_mplock(): Releasing another cpu's MP lock! %p %p"
220
221#endif
984263bc 222
8a8d5d85
MD
223#if 0
224/* after 1st acquire of lock we grab all hardware INTs */
225#ifdef GRAB_LOPRIO
226#define GRAB_HWI movl $ALLHWI_LEVEL, lapic_tpr
984263bc 227
8a8d5d85
MD
228/* after last release of lock give up LOW PRIO (ie, arbitrate INTerrupts) */
229#define ARB_HWI movl $LOPRIO_LEVEL, lapic_tpr /* CHEAP_TPR */
230#endif
231#endif
984263bc 232