Plan 9 from Bell Labs’s /usr/web/sources/extra/9hist/mtx/l.s

Copyright © 2021 Plan 9 Foundation.
Distributed under the MIT License.


## diffname mtx/l.s 2001/0810
## diff -e /dev/null /n/emeliedump/2001/0810/sys/src/9/mtx/l.s
0a
#include	"mem.h"
#include	"mpc60x.h"

#define	BDNZ	BC	16,0,
#define	BDNE	BC	0,2,
#define	NOOP	OR	R0,R0,R0
#define	TLBIA	WORD	$(31<<26)

/* Beware 603e chip bugs (mtmsr instruction) */
#define	FIX603e	CROR	0,0,0
#undef FIX603e
#define	FIX603e	ISYNC; SYNC

	TEXT start(SB), $-4
	MOVW	$setSB(SB), R2

	BL	main(SB)
	RETURN		/* not reached */

TEXT	splhi(SB), $0
	MOVW	LR, R31
	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
	MOVW	MSR, R3
	RLWNM	$0, R3, $~MSR_EE, R4
	SYNC
	MOVW	R4, MSR
	FIX603e
	RETURN

TEXT	splx(SB), $0
	/* fall through */

TEXT	splxpc(SB), $0
	MOVW	LR, R31
	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
	MOVW	MSR, R4
	RLWMI	$0, R3, $MSR_EE, R4
	SYNC
	MOVW	R4, MSR
	FIX603e
	RETURN

TEXT	spllo(SB), $0
	MOVW	MSR, R3
	OR	$MSR_EE, R3, R4
	SYNC
	MOVW	R4, MSR
	FIX603e
	RETURN

TEXT	spldone(SB), $0
	RETURN

TEXT	islo(SB), $0
	MOVW	MSR, R3
	RLWNM	$0, R3, $MSR_EE, R3
	RETURN

TEXT	setlabel(SB), $-4
	MOVW	LR, R31
	MOVW	R1, 0(R3)
	MOVW	R31, 4(R3)
	MOVW	$0, R3
	RETURN

TEXT	gotolabel(SB), $-4
	MOVW	4(R3), R31
	MOVW	R31, LR
	MOVW	0(R3), R1
	MOVW	$1, R3
	RETURN
.
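
The first revision already provides the interrupt-priority and context-switch primitives the portable kernel expects: splhi() clears MSR_EE and returns the old MSR, splx() restores it, and setlabel()/gotolabel() save and resume a stack-pointer/program-counter pair. A minimal C sketch of how such primitives are typically called, assuming the usual Plan 9 port interface; the Label declaration here is an illustrative stand-in for the one in the port's dat.h.

typedef unsigned long ulong;

typedef struct Label Label;
struct Label {
	ulong	sp;	/* stored at 0(R3) by setlabel */
	ulong	pc;	/* stored at 4(R3) */
};

int	splhi(void);
int	spllo(void);
void	splx(int);
int	setlabel(Label*);
void	gotolabel(Label*);

static Label resume;

void
critical(void)
{
	int s;

	s = splhi();	/* MSR_EE off; old MSR returned so calls nest */
	/* ... touch state shared with interrupt handlers ... */
	splx(s);	/* restore whatever interrupt state the caller had */
}

void
checkpoint(void)
{
	if(setlabel(&resume) == 0)
		return;	/* first pass: label recorded, setlabel returns 0 */
	/* control arrives here, with setlabel's apparent return value 1,
	 * when some other code later calls gotolabel(&resume); the stack
	 * recorded in the label must still be valid at that point */
}
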
## diffname mtx/l.s 2001/1122
## diff -e /n/emeliedump/2001/0810/sys/src/9/mtx/l.s /n/emeliedump/2001/1122/sys/src/9/mtx/l.s
2d
## diffname mtx/l.s 2001/1207
## diff -e /n/emeliedump/2001/1122/sys/src/9/mtx/l.s /n/emeliedump/2001/1207/sys/src/9/mtx/l.s
70a

TEXT	touser(SB), $-4
	MOVW	$(UTZERO+32), R5	/* header appears in text */
	MOVW	$(MSR_EE|MSR_PR|MSR_ME|MSR_IR|MSR_DR|MSR_RI), R4
	MOVW	R4, SPR(SRR1)
	MOVW	R3, R1
	MOVW	R5, SPR(SRR0)
	RFI

TEXT	icflush(SB), $-4	/* icflush(virtaddr, count) */
	MOVW	n+4(FP), R4
	RLWNM	$0, R3, $~(CACHELINESZ-1), R5
	SUB	R5, R3
	ADD	R3, R4
	ADD		$(CACHELINESZ-1), R4
	SRAW	$CACHELINELOG, R4
	MOVW	R4, CTR
icf0:	ICBI	(R5)
	ADD	$CACHELINESZ, R5
	BDNZ	icf0
	ISYNC
	RETURN

TEXT	dcflush(SB), $-4	/* dcflush(virtaddr, count) */
	MOVW	n+4(FP), R4
	RLWNM	$0, R3, $~(CACHELINESZ-1), R5
	CMP	R4, $0
	BLE	dcf1
	SUB	R5, R3
	ADD	R3, R4
	ADD		$(CACHELINESZ-1), R4
	SRAW	$CACHELINELOG, R4
	MOVW	R4, CTR
dcf0:	DCBF	(R5)
	ADD	$CACHELINESZ, R5
	BDNZ	dcf0
dcf1:
	RETURN

TEXT	tas(SB), $0
	SYNC
	MOVW	R3, R4
	MOVW	$0xdead,R5
tas1:
	DCBF	(R4)	/* fix for 603x bug */
	LWAR	(R4), R3
	CMP	R3, $0
	BNE	tas0
	STWCCC	R5, (R4)
	BNE	tas1
tas0:
	SYNC
	ISYNC
	RETURN

TEXT	getpvr(SB), $0
	MOVW	SPR(PVR), R3
	RETURN

TEXT	getdec(SB), $0
	MOVW	SPR(DEC), R3
	RETURN

TEXT	putdec(SB), $0
	MOVW	R3, SPR(DEC)
	RETURN

TEXT	getdar(SB), $0
	MOVW	SPR(DAR), R3
	RETURN

TEXT	getdsisr(SB), $0
	MOVW	SPR(DSISR), R3
	RETURN

TEXT	getmsr(SB), $0
	MOVW	MSR, R3
	RETURN

TEXT	putmsr(SB), $0
	SYNC
	MOVW	R3, MSR
	MSRSYNC
	RETURN

TEXT	eieio(SB), $0
	EIEIO
	RETURN

TEXT	tlbflushall(SB), $0
	TLBIA
	RETURN

TEXT	tlbflush(SB), $0
	TLBIE	R3
	RETURN

TEXT	gotopc(SB), $0
	MOVW	R3, CTR
	MOVW	LR, R31	/* for trace back */
	BR	(CTR)

/*
 * traps force memory mapping off.
 * the following code has been executed at the exception
 * vector location
 *	MOVW R0, SPR(SAVER0)
 *	MOVW LR, R0
 *	MOVW R0, SPR(SAVELR) 
 *	bl	trapvec(SB)
 */
TEXT	trapvec(SB), $-4
	MOVW	LR, R0
	MOVW	R1, SPR(SAVER1)
	MOVW	R0, SPR(SAVEXX)	/* vector */

/*
	to enable hardware break points
	MOVW	MSR, R1
	OR		$(MSR_RI), R1
	MOVW	R1, MSR
	ISYNC
*/	

	/* did we come from user space */
	MOVW	SPR(SRR1), R0
	MOVW	CR, R1
	MOVW	R0, CR
	BC	4,17,ktrap
	
	/* switch to kernel stack */
	MOVW	R1, CR
	MOVW	$(MACHADDR&~KZERO), R1	/* PADDR(m->) */
	MOVW	12(R1), R1				/* m->proc  */
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->proc) */
	MOVW	8(R1), R1				/* m->proc->kstack */
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->proc->kstack) */
	ADD	$(KSTACK-UREGSIZE), R1
	BL	saveureg(SB)
	BL	trap(SB)
	BR	restoreureg
ktrap:
	MOVW	R1, CR
	MOVW	SPR(SAVER1), R1
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->proc->kstack) */
	SUB	$UREGSPACE, R1
	BL	saveureg(SB)
	BL	trap(SB)
	BR	restoreureg


/*
 * enter with stack set and mapped.
 * on return, SB (R2) has been set, and R3 has the Ureg*,
 * the MMU has been re-enabled, kernel text and PC are in KSEG,
 * R(MACH) has been set, and R0 contains 0.
 *
 */
TEXT	saveureg(SB), $-4
/*
 * save state
 */
	MOVMW	R2, 48(R1)	/* r2:r31 */
	MOVW	$setSB(SB), R2
	MOVW	$(MACHADDR&~KZERO), R(MACH)
	MOVW	12(R(MACH)), R(USER)
	MOVW	$MACHADDR, R(MACH)
	MOVW	SPR(SAVER1), R4
	MOVW	R4, 44(R1)
	MOVW	SPR(SAVER0), R5
	MOVW	R5, 40(R1)
	MOVW	CTR, R6
	MOVW	R6, 36(R1)
	MOVW	XER, R4
	MOVW	R4, 32(R1)
	MOVW	CR, R5
	MOVW	R5, 28(R1)
	MOVW	SPR(SAVELR), R6	/* LR */
	MOVW	R6, 24(R1)
	/* pad at 20(R1) */
	MOVW	SPR(SRR0), R0
	MOVW	R0, 16(R1)				/* old PC */
	MOVW	SPR(SRR1), R0
	MOVW	R0, 12(R1)				/* old status */
	MOVW	SPR(SAVEXX), R0
	MOVW	R0, 8(R1)	/* cause/vector */
	ADD	$8, R1, R3	/* Ureg* */
	OR	$KZERO, R3	/* fix ureg */
	STWCCC	R3, (R1)	/* break any pending reservations */
	MOVW	$0, R0	/* compiler/linker expect R0 to be zero */

	MOVW	MSR, R5
//	OR	$(MSR_IR|MSR_DR|MSR_RI), R5	/* enable MMU */
	MOVW	R5, SPR(SRR1)
	MOVW	LR, R31
//	OR	$KZERO, R31	/* return PC in KSEG0 */
	MOVW	R31, SPR(SRR0)
//	OR	$KZERO, R1	/* fix stack pointer */
	RFI	/* returns to trap handler */

/*
 * restore state from Ureg and return from trap/interrupt
 */
TEXT	forkret(SB), $0
	BR	restoreureg

restoreureg:
	MOVMW	48(R1), R2	/* r2:r31 */
	/* defer R1 */
	MOVW	40(R1), R0
	MOVW	R0, SPR(SAVER0)
	MOVW	36(R1), R0
	MOVW	R0, CTR
	MOVW	32(R1), R0
	MOVW	R0, XER
	MOVW	28(R1), R0
	MOVW	R0, CR	/* CR */
	MOVW	24(R1), R0
	MOVW	R0, LR
	/* pad, skip */
	MOVW	16(R1), R0
	MOVW	R0, SPR(SRR0)	/* old PC */
	MOVW	12(R1), R0
	MOVW	R0, SPR(SRR1)	/* old MSR */
	/* cause, skip */
	MOVW	44(R1), R1	/* old SP */
	MOVW	SPR(SAVER0), R0
	RFI
.
47c
	MSRSYNC
.
39c
	MSRSYNC
.
26c
	MSRSYNC
.
12a
#define	UREGSPACE	(UREGSIZE+8)

.
8,11c
/* on some models mtmsr doesn't synchronise enough (eg, 603e) */
#define	MSRSYNC	SYNC; ISYNC
.
5,6c
//#define	TLBIA	WORD	$(31<<26)
#define	TLBIA	WORD	$((31<<26)|(370<<1))
//#define	MFTB(tbr,d)	WORD	$((31<<26)|((d)<<21)|((tbr&0x1f)<<16)|(((tbr>>5)&0x1f)<<11)|(371<<1))
.
2a
/* use of SPRG registers in save/restore */
#define	SAVER0	SPRG0
#define	SAVER1	SPRG1
#define	SAVELR	SPRG2
#define	SAVEXX	SPRG3

/* special instruction definitions */
.
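
Among the additions in this revision is tas(), which uses LWAR/STWCCC (lwarx/stwcx.) to fetch the old value of a word atomically and store 0xdead into it, retrying if the reservation is lost, with a leading DCBF as a 603 errata workaround. A minimal sketch of the spin lock such a primitive supports, assuming a Lock with a single key word; the portable kernel's real lock() also spins politely and keeps accounting, so this shows only the core idea.

typedef struct Lock Lock;
struct Lock {
	int	key;	/* 0 = free; tas() stores 0xdead when taking it */
};

int	tas(int*);	/* the LWAR/STWCCC loop above */
void	eieio(void);	/* also defined above */

void
lock(Lock *l)
{
	while(tas(&l->key) != 0)
		;	/* previous value was non-zero: someone else holds it */
}

void
unlock(Lock *l)
{
	eieio();	/* order the critical section's stores before the release */
	l->key = 0;
}
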
## diffname mtx/l.s 2001/1208
## diff -e /n/emeliedump/2001/1207/sys/src/9/mtx/l.s /n/emeliedump/2001/1208/sys/src/9/mtx/l.s
276c
	OR	$KZERO, R1	/* fix stack pointer */
.
274c
	OR	$KZERO, R31	/* return PC in KSEG0 */
.
244c
	MOVW	8(R(MACH)), R(USER)
.
229d
212c
	MOVW	8(R1), R1				/* m->proc  */
.
195,202d
133a
TEXT	firmware(SB), $0
	MOVW	$0, R4
	MOVW	R4, SPR(SRR1)
	MOVW	$0x100, R5
	MOVW	R5, SPR(SRR0)
	RFI

.
24a

.
22a
	MOVW	$MACHADDR, R(MACH)
	MOVW	$(KTZERO-8), R1	/* stack, in Mach */
.
21a

.
## diffname mtx/l.s 2001/1212
## diff -e /n/emeliedump/2001/1208/sys/src/9/mtx/l.s /n/emeliedump/2001/1212/sys/src/9/mtx/l.s
273c
	OR	$(MSR_IR|MSR_DR|MSR_RI), R5	/* enable MMU */
.
247c
	MOVW	$mach0(SB), R(MACH)
	MOVW	$setSB(SB), R2
.
245c
	RLWNM	$0, R2, $~KZERO, R2		/* PADDR(setSB) */
	MOVW	$mach0(SB), R(MACH)
	RLWNM	$0, R(MACH), $~KZERO, R(MACH)		/* PADDR(m->) */
.
219a
	MOVW	R0, R2
.
214c
	MOVW	R2, R0
	MOVW	$setSB(SB), R2
	RLWNM	$0, R2, $~KZERO, R2		/* PADDR(setSB) */
	MOVW	$mach0(SB), R1	/* m-> */
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->) */
.
180c
	MOVW	$64, R3
	MOVW	R3, CTR
	MOVW	$0, R4
tlbflushall0:
	TLBIE	R4
	ADD		$BIT(19), R4
	BDNZ	tlbflushall0
	TLBSYNC
.
141c
	MOVW	$0xfff00100, R5
.
139c
	MOVW	initsprg3(SB), R3
	MOVW	R3, SPR(SAVEXX)
	MOVW	$MSR_IP, R4
.
30a
GLOBL	mach0(SB), $(MAXMACH*BY2PG)
GLOBL	initsprg3(SB), $4

/*
 * on return from this function we will be running in virtual mode.
 * We set up the Block Address Translation (BAT) registers thus:
 * 1) first 3 BATs are 256M blocks, starting from KZERO->0
 * 2) remaining BAT maps last 256M directly
 */
TEXT	mmuinit0(SB), $0
	/* reset all the tlbs */
	MOVW	$64, R3
	MOVW	R3, CTR
	MOVW	$0, R4
tlbloop:
	TLBIE	R4
	ADD		$BIT(19), R4
	BDNZ	tlbloop
	TLBSYNC

	/* KZERO -> 0 */
	MOVW	$(KZERO|(0x7ff<<2)|2), R3
	MOVW	$(0|(0<<3)|2), R4
	MOVW	R3, SPR(IBATU(0))
	MOVW	R4, SPR(IBATL(0))
	MOVW	R3, SPR(DBATU(0))
	MOVW	R4, SPR(DBATL(0))

	/* KZERO+256M -> 256M */
	ADD		$(1<<28), R3
	ADD		$(1<<28), R4
	MOVW	R3, SPR(IBATU(1))
	MOVW	R4, SPR(IBATL(1))
	MOVW	R3, SPR(DBATU(1))
	MOVW	R4, SPR(DBATL(1))

	/* KZERO+512M -> 512M */
	ADD		$(1<<28), R3
	ADD		$(1<<28), R4
	MOVW	R3, SPR(IBATU(2))
	MOVW	R4, SPR(IBATL(2))
	MOVW	R3, SPR(DBATU(2))
	MOVW	R4, SPR(DBATL(2))

	/* direct map last block, uncached, (?guarded) */
	MOVW	$((0xf<<28)|(0x7ff<<2)|2), R3
	MOVW	$((0xf<<28)|(4<<3)|2), R4
	MOVW	R3, SPR(IBATU(3))
	MOVW	R4, SPR(IBATL(3))
	MOVW	R3, SPR(DBATU(3))
	MOVW	R4, SPR(DBATL(3))

	/* enable MMU */
	MOVW	LR, R3
	OR	$KZERO, R3
	MOVW	R3, SPR(SRR0)
	MOVW	MSR, R4
	OR	$(MSR_IR|MSR_DR), R4
	MOVW	R4, SPR(SRR1)
	RFI	/* resume in kernel mode in caller */

	RETURN

.
26a
	BL	mmuinit0(SB)

	/* running with MMU on!! */

	/* set R2 to correct value */
	MOVW	$setSB(SB), R2

	/* save SPR(SPRG3), or else we can't go back to firmware! */
	MOVW SPR(SAVEXX), R3
	MOVW R3, initsprg3(SB)

	/* set up Mach */
	MOVW	$mach0(SB), R(MACH)
	ADD	$(MACHSIZE-8), R(MACH), R1	/* set stack */

	MOVW	R0, R(USER)
	MOVW	R0, 0(R(MACH))

.
24,25c
	MOVW	$KZERO, R3
	ANDN	R3, R2
.
22a
	/*
	 * setup MSR
	 * turn off interrupts
	 * use 0x000 as exception prefix
	 * enable machine check
	 */
	MOVW	MSR, R3
	MOVW	$(MSR_EE|MSR_IP), R4
	ANDN	R4, R3
	OR		$(MSR_ME), R3
	ISYNC
	MOVW	R3, MSR
	MSRSYNC

	/* except during trap handling, R0 is zero from now on */
	MOVW	$0, R0

	/* setup SB for pre mmu */
.
15a
#define	TLBIA	WORD	$((31<<26)|(307<<1))
#define	TLBSYNC	WORD	$((31<<26)|(566<<1))

.
12,14d
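
mmuinit0(), added in this revision, flushes the TLB and then fills the instruction/data BAT pairs: three 256MB blocks mapping KZERO, KZERO+256M and KZERO+512M onto physical 0, 256M and 512M, plus an uncached block covering the top 256MB for device registers. The constants follow the 60x BAT layout; a rough C rendering with illustrative macro names (not the ones in mem.h or mpc60x.h):

/* upper BAT: effective page index | block-length mask | valid bits (2 = supervisor-valid) */
#define BATU(ea, bl)		((ea) | ((bl)<<2) | 2)
/* lower BAT: physical block number | WIMG cache bits | PP protection */
#define BATL(pa, wimg, pp)	((pa) | ((wimg)<<3) | (pp))

/* BL = 0x7ff selects a 256MB block ((0x7ff+1) * 128KB) */
/* KZERO -> 0, cached, read/write:	BATU(KZERO, 0x7ff)  == KZERO|(0x7ff<<2)|2 */
/*					BATL(0, 0, 2)       == 0|(0<<3)|2 */
/* top 256MB mapped 1:1, uncached:	BATL(0xf<<28, 4, 2) == (0xf<<28)|(4<<3)|2 */
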
## diffname mtx/l.s 2001/1214
## diff -e /n/emeliedump/2001/1212/sys/src/9/mtx/l.s /n/emeliedump/2001/1214/sys/src/9/mtx/l.s
239c
	MOVW	R3, SPR(SPRG3)
.
68a
GLOBL	memsize(SB), $4
.
55a
	/* debugger sets R1 to top of usable memory +1 */
	MOVW R1, memsize(SB)

.
53c
	MOVW SPR(SPRG3), R3
.
## diffname mtx/l.s 2001/1222
## diff -e /n/emeliedump/2001/1214/sys/src/9/mtx/l.s /n/emeliedump/2001/1222/sys/src/9/mtx/l.s
279a
TEXT	putsdr1(SB), $0
	MOVW	R3, SPR(SDR1)
	RETURN

.
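
putsdr1(), added here, simply loads SDR1, so the caller must already have encoded the page-table pointer. On the 32-bit 60x parts SDR1 holds the hash-table origin in its upper bits and a size mask in its low nine bits; a hedged sketch of building such a value (sdr1val() and its example caller are illustrative, not code from the mtx port):

typedef unsigned long ulong;

void	putsdr1(ulong);	/* defined above */

/* hash table of 1<<n bytes (n >= 16), base physically aligned to its size */
ulong
sdr1val(ulong physbase, int n)
{
	return physbase | ((1UL<<(n-16)) - 1);	/* HTABORG | HTABMASK */
}

/* e.g. a 1MB table at physical 3MB: putsdr1(sdr1val(0x300000, 20)); */
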
## diffname mtx/l.s 2002/0108
## diff -e /n/emeliedump/2001/1222/sys/src/9/mtx/l.s /n/emeliedump/2002/0108/sys/src/9/mtx/l.s
285a
	RETURN

TEXT	sync(SB), $0
	SYNC
.
283a
TEXT	putsr(SB), $0
	MOVW	4(FP), R4
	MOVW	R4, SEG(R3)
	RETURN

.
## diffname mtx/l.s 2002/0111
## diff -e /n/emeliedump/2002/0108/sys/src/9/mtx/l.s /n/emeliedump/2002/0111/sys/src/9/mtx/l.s
359a
//	BL	splhi(SB)					/* BUG? */
.
351a
//	BL	splhi(SB)					/* BUG? */
.
## diffname mtx/l.s 2002/0112
## diff -e /n/emeliedump/2002/0111/sys/src/9/mtx/l.s /n/emeliedump/2002/0112/sys/src/9/mtx/l.s
443a

TEXT	fpsave(SB), $0
	FMOVD	F0, (0*8)(R3)
	FMOVD	F1, (1*8)(R3)
	FMOVD	F2, (2*8)(R3)
	FMOVD	F3, (3*8)(R3)
	FMOVD	F4, (4*8)(R3)
	FMOVD	F5, (5*8)(R3)
	FMOVD	F6, (6*8)(R3)
	FMOVD	F7, (7*8)(R3)
	FMOVD	F8, (8*8)(R3)
	FMOVD	F9, (9*8)(R3)
	FMOVD	F10, (10*8)(R3)
	FMOVD	F11, (11*8)(R3)
	FMOVD	F12, (12*8)(R3)
	FMOVD	F13, (13*8)(R3)
	FMOVD	F14, (14*8)(R3)
	FMOVD	F15, (15*8)(R3)
	FMOVD	F16, (16*8)(R3)
	FMOVD	F17, (17*8)(R3)
	FMOVD	F18, (18*8)(R3)
	FMOVD	F19, (19*8)(R3)
	FMOVD	F20, (20*8)(R3)
	FMOVD	F21, (21*8)(R3)
	FMOVD	F22, (22*8)(R3)
	FMOVD	F23, (23*8)(R3)
	FMOVD	F24, (24*8)(R3)
	FMOVD	F25, (25*8)(R3)
	FMOVD	F26, (26*8)(R3)
	FMOVD	F27, (27*8)(R3)
	FMOVD	F28, (28*8)(R3)
	FMOVD	F29, (29*8)(R3)
	FMOVD	F30, (30*8)(R3)
	FMOVD	F31, (31*8)(R3)
	MOVFL	FPSCR, F0
	FMOVD	F0, (32*8)(R3)
	RETURN

TEXT	fprestore(SB), $0
	FMOVD	(32*8)(R3), F0
	MOVFL	F0, FPSCR
	FMOVD	(0*8)(R3), F0
	FMOVD	(1*8)(R3), F1
	FMOVD	(2*8)(R3), F2
	FMOVD	(3*8)(R3), F3
	FMOVD	(4*8)(R3), F4
	FMOVD	(5*8)(R3), F5
	FMOVD	(6*8)(R3), F6
	FMOVD	(7*8)(R3), F7
	FMOVD	(8*8)(R3), F8
	FMOVD	(9*8)(R3), F9
	FMOVD	(10*8)(R3), F10
	FMOVD	(11*8)(R3), F11
	FMOVD	(12*8)(R3), F12
	FMOVD	(13*8)(R3), F13
	FMOVD	(14*8)(R3), F14
	FMOVD	(15*8)(R3), F15
	FMOVD	(16*8)(R3), F16
	FMOVD	(17*8)(R3), F17
	FMOVD	(18*8)(R3), F18
	FMOVD	(19*8)(R3), F19
	FMOVD	(20*8)(R3), F20
	FMOVD	(21*8)(R3), F21
	FMOVD	(22*8)(R3), F22
	FMOVD	(23*8)(R3), F23
	FMOVD	(24*8)(R3), F24
	FMOVD	(25*8)(R3), F25
	FMOVD	(26*8)(R3), F26
	FMOVD	(27*8)(R3), F27
	FMOVD	(28*8)(R3), F28
	FMOVD	(29*8)(R3), F29
	FMOVD	(30*8)(R3), F30
	FMOVD	(31*8)(R3), F31
	RETURN
.
408c
	OR	$(MSR_IR|MSR_DR|MSR_FP|MSR_RI), R5	/* enable MMU */
.
361d
352d
288a
TEXT	gethid0(SB), $0
	MOVW	SPR(HID0), R3
	RETURN

TEXT	gethid1(SB), $0
	MOVW	SPR(HID1), R3
	RETURN

TEXT	puthid0(SB), $0
	MOVW	R3, SPR(HID0)
	RETURN

TEXT	puthid1(SB), $0
	MOVW	R3, SPR(HID1)
	RETURN

.
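
fpsave() and fprestore(), added in this revision, move the 32 floating-point registers plus FPSCR, staging FPSCR through F0 as a 33rd double. A plausible matching C layout, assuming the usual Plan 9 convention for this structure (the real declaration lives in the port's dat.h; putting fpscr in the low word assumes the 60x's big-endian byte order):

typedef unsigned long ulong;

typedef struct FPsave FPsave;
struct FPsave {
	double	fpreg[32];	/* (0*8)(R3) .. (31*8)(R3) */
	union {
		double	fpscrd;	/* FPSCR staged through F0 at (32*8)(R3) */
		struct {
			ulong	pad;
			ulong	fpscr;	/* architected FPSCR is the low word */
		};
	};
};

void	fpsave(FPsave*);
void	fprestore(FPsave*);
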
## diffname mtx/l.s 2002/0116
## diff -e /n/emeliedump/2002/0112/sys/src/9/mtx/l.s /n/emeliedump/2002/0116/sys/src/9/mtx/l.s
131a
	RETURN

TEXT	kfpinit(SB), $0
	MOVFL	$0,FPSCR(7)
	MOVFL	$0xD,FPSCR(6)	/* VE, OE, ZE */
	MOVFL	$0, FPSCR(5)
	MOVFL	$0, FPSCR(3)
	MOVFL	$0, FPSCR(2)
	MOVFL	$0, FPSCR(1)
	MOVFL	$0, FPSCR(0)

	FMOVD	$4503601774854144.0, F27
	FMOVD	$0.5, F29
	FSUB		F29, F29, F28
	FADD	F29, F29, F30
	FADD	F30, F30, F31
	FMOVD	F28, F0
	FMOVD	F28, F1
	FMOVD	F28, F2
	FMOVD	F28, F3
	FMOVD	F28, F4
	FMOVD	F28, F5
	FMOVD	F28, F6
	FMOVD	F28, F7
	FMOVD	F28, F8
	FMOVD	F28, F9
	FMOVD	F28, F10
	FMOVD	F28, F11
	FMOVD	F28, F12
	FMOVD	F28, F13
	FMOVD	F28, F14
	FMOVD	F28, F15
	FMOVD	F28, F16
	FMOVD	F28, F17
	FMOVD	F28, F18
	FMOVD	F28, F19
	FMOVD	F28, F20
	FMOVD	F28, F21
	FMOVD	F28, F22
	FMOVD	F28, F23
	FMOVD	F28, F24
	FMOVD	F28, F25
	FMOVD	F28, F26
.
122a
	/* IBAT 3 unused */
	MOVW	R0, SPR(IBATU(3))
	MOVW	R0, SPR(IBATL(3))

.
117,119c
	MOVW	$((0xf<<28)|PTE1_I|PTE1_G|PTE1_RW), R4
.
93c
	MOVW	$(PTEVALID|PTEWRITE), R4
.
58a
	BL		kfpinit(SB)

.
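
kfpinit(), added here, seeds the floating-point registers the compiler reserves: F28..F31 get 0.0, 0.5, 1.0 and 2.0 (built from 0.5 by subtraction and addition), the remaining registers are zeroed, and F27 gets 4503601774854144.0, which is 2^52 + 2^31. That is the standard constant for integer-to-double conversion on 32-bit PowerPC, which has no convert-from-integer instruction. A hedged, host-side demonstration of why that particular number works; itod() is illustrative, not kernel code.

#include <stdio.h>
#include <string.h>

typedef unsigned int u32;
typedef unsigned long long u64;

/* build the double 2^52 + 2^31 + i bit by bit, then subtract the F27 constant */
double
itod(int i)
{
	u64 bits;
	double d;

	/* high word 0x43300000 is the bit pattern of 2^52; the low word holds
	 * the integer with its sign bit flipped, i.e. i + 2^31 */
	bits = (0x43300000ULL << 32) | ((u32)i ^ 0x80000000U);
	memcpy(&d, &bits, sizeof d);
	return d - 4503601774854144.0;	/* 2^52 + 2^31, the value loaded into F27 */
}

int
main(void)
{
	printf("%g %g %g\n", itod(0), itod(-5), itod(123456789));
	return 0;
}
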
## diffname mtx/l.s 2002/0124
## diff -e /n/emeliedump/2002/0116/sys/src/9/mtx/l.s /n/emeliedump/2002/0124/sys/src/9/mtx/l.s
419c
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(R1) */
.
## diffname mtx/l.s 2002/0125
## diff -e /n/emeliedump/2002/0124/sys/src/9/mtx/l.s /n/emeliedump/2002/0125/sys/src/9/mtx/l.s
290a
//	MOVW	$0x4000, R3
//	MOVW	R3, SPR(SPRG3)
	MOVW	R0, SPR(HID0)
.
## diffname mtx/l.s 2002/0126
## diff -e /n/emeliedump/2002/0125/sys/src/9/mtx/l.s /n/emeliedump/2002/0126/sys/src/9/mtx/l.s
287,298d
73d
52,55d
## diffname mtx/l.s 2002/0213
## diff -e /n/emeliedump/2002/0126/sys/src/9/mtx/l.s /n/emeliedump/2002/0213/sys/src/9/mtx/l.s
354a
	SYNC
.
353a
	EIEIO
.
## diffname mtx/l.s 2002/0818
## diff -e /n/emeliedump/2002/0213/sys/src/9/mtx/l.s /n/emeliedump/2002/0818/sys/src/9/mtx/l.s
264a
	SYNC
.
