-
Notifications
You must be signed in to change notification settings - Fork 212
/
Copy patharch_test.h
2066 lines (1853 loc) · 105 KB
/
arch_test.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
// ***DELETEME** note to self
// this version adds instret counts to the signature
// this version modifies the LA macro to skip generation if rd=x0 (or X0)
// this version detects ECALL cause even in CLIC mode
// -----------
// Copyright (c) 2020-2023. RISC-V International. All rights reserved.
// SPDX-License-Identifier: BSD-3-Clause
// -----------
//********************************************************************************
//********** FIXME: these comments are now completely out of order****************
//********************************************************************************
// This file is divided into the following sections:
// RV Arch Test Constants
// general test and helper macros, required, optional, or just useful
// _ARGn, SIG[BASE/UPD[_F/ID]],BASEUPD,BIT,LA ,LI,RVTEST_[INIT/SAVE]_GPRS, XCSR_RENAME
// RV ARCH Test Interrupt Macros ****FIXME:spec which regs must not be altered
// primary macros used by handle: RVTEST_TRAP{_PROLOG/_HANDLER/_EPILOG/SAVEAREA}
// required test format spec macros: RVTEST_{Code/DATA/SIG}{_BEGIN/_END}
// macros from Andrew Waterman's risc-v test macros
// deprecated macro name aliases, just for migration ease
// The resulting memory layout of the trap handler is (MACRO_NAME, label [function])
//****************************************************************
// (code section)
// RVMODEL_BOOT
// rvtest_entry_point: [boot code]
// RVTEST_CODE_BEGIN
// rvtest_init: [TRAP_PROLOG] (m, ms, or msv)
// [INIT_GPRS]
// rvtest_code_begin:
//*****************************
//********(body of tests)******
//*****************************
// RVTEST_CODE_END
// rvtest_code_end: [*optional* SAVE_GPRS routine]
// [RVTEST_GOTO_MMODE ] **FIXME** this won't work if MMU enabled unless VA=PA
// cleanup_epilogs [TRAP_EPILOG (m, ms, or msv)] (jump to exit_cleanup)
// [TRAP_HANDLER (m, ms, or msv)]
// exit_cleanup: [RVMODEL_HALT macro or a branch to it.]
//
//--------------------------------this could start a new section--------------------------------
// (Data section) - align to 4K boundary
// RVTEST_DATA_BEGIN
//**************************************
//*****(trap handler data is here)******
//**************************************
//
//**************************************
//*****(Ld/St test data is here)********
//**************************************
//
// rvtest_trap_sig: [ptr to global trap signature start (shared by all modes) inited to mtrap_sigptr] **FIXME: needs VA=PA
// RVTEST_TRAP_SAVEAREA [handler sv area(m, ms, or msv) temp reg save, CSRs, tramp table, ptrs]
// rvtest_data_begin: [input data (shared by all modes)]
// RVTEST_DATA_END
// rvtest_data_end:
// RVTEST_ROOT_PG_TBL [sets up identity map (VA=PA)
// sroot_pg_tbl: (if smode)
// vroot_pg_tbl: (if hypervisor)
//--------------------------------this could start a new section--------------------------------
// RVTEST_SIG_BEGIN
// RVMODEL_DATA_BEGIN
// rvtest_sig_begin: [beginning of signature, used by signature dump, can be used by tests]
// mtrap_sigptr: [global trap signature start (shared by all modes)] - defined by tests
// gpr_save: [gpr save area (optional, enabled if rvtest_gpr_save is defined)]
// RVTEST_SIG_END
// rvtest_sig_end: [global test end signature (shared by all modes)] (shouldn't matter what RVMODEL_DATA_END does)
// RVMODEL_DATA_END
//--------------------------------end of test--------------------------------
/* The following macros are optional if interrupt tests are enabled (defaulted if not defined):
RVMODEL_SET_[M/V/S]_[SW]_INT
RVMODEL_CLR_[M/V/S]_[SW/TIMER/EXT]_INT
rvtest_[M/V/S]trap_routine
GOTO_[M/S/U]MODE, INSTANTIATE_MODE_MACRO (prolog/handler/epilog/savearea)
The following macro is optional, and defaults to fence.i if not defined
RVMODEL_FENCEI
The following variables are used if interrupt tests are enabled (defaulted if not defined):
NUM_SPECD_INTCAUSES
The following variables are optional if exception tests are enabled (defaulted if not defined):
DATA_REL_TVAL_MSK CODE_REL_TVAL_MSK
The following variables are optional:
rvtest_gpr_save: if defined, stores GPR contents into signature at test end (for debug)
The following labels are required and defined by required macros:
rvtest_code_begin: defined by RVTEST_CODE_BEGIN macro (boot code can precede this)
rvtest_code_end: defined by RVTEST_CODE_END macro (trap handlers follow this)
rvtest_data_begin: defined by RVTEST_DATA_BEGIN macro
rvtest_data_end: defined by RVTEST_DATA_END macro
rvtest_sig_begin: defined by RVTEST_SIG_BEGIN macro (after RVMODEL_DATA_BEGIN) defines signature begin
rvtest_sig_end: defined by RVTEST_SIG_END macro (before RVMODEL_DATA_END) defines signature end
rvtest_Sroot_pg_tbl: defined by RVTEST_PTE_IDENT_MAP macro inside RVTEST_DATA_BEGIN if Smode implemented
rvtest_Vroot_pg_tbl: defined by RVTEST_PTE_IDENT_MAP macro inside RVTEST_DATA_BEGIN if VSmode implemented
labels/variables that must be defined by the DUT in model specific macros or #defines
mtrap_sigptr: defined by test if traps are possible, else is defaulted
*/
// don't put C-style macros (#define xxx) inside assembly macros; C-style is evaluated before assembly
#include "encoding.h"
#include "test_macros.h"
#define RVTEST_ISA(_STR) //empty macro used by framework
// temporary-register aliases used by framework code
// NOTE(review): T3/T4 alias x8/x9 (ABI s0/s1), not ABI temporaries -- confirm intentional
#define T1 x6
#define T2 x7
#define T3 x8
#define T4 x9
#define T5 x10
#define T6 x11
#define MIN(a,b) (((a)<(b))?(a):(b)) // integer minimum
#define MAX(a,b) (((a)>(b))?(a):(b)) // integer maximum
#define BIT(addr, bit) (((addr)>>(bit))&1) // extract bit <bit> of <addr>
#define MASK (((1<<(XLEN-1))-1) + (1<<(XLEN-1))) // XLEN bits of 1s
#define MASK_XLEN(val) ((val)&MASK) // FIX: parenthesize val for macro hygiene; shortens 64b values to XLEN when XLEN==32
#define REGWIDTH (XLEN>>3) // in units of #bytes
// word-size constants and sign-extension helpers used by the LI/LA machinery
#define WDSZ 32 // word size in bits
#define WDSGN ( WDSZ -1) // bit index of the word sign bit (31)
#define WDMSK ( (1 << WDSZ) -1) // mask of WDSZ one bits
// FIX: SEXT_WRD previously referenced undefined symbols WRDSGN/WRDSZ;
// the constants defined above are named WDSGN/WDSZ.
#define SEXT_WRD(x) ((x & WDMSK) | (-BIT((x), WDSGN)<< WDSZ))
#define IMMSZ 12 // I-type immediate width in bits
#define IMMSGN (IMMSZ -1) // bit index of the immediate sign bit
#define IMMMSK ( (1 << IMMSZ)-1) // mask of IMMSZ one bits
#define SEXT_IMM(x) ((x & IMMMSK) | (-BIT((x), IMMSGN)<< IMMSZ))
#define LIMMSZ (WDSZ - IMMSZ) // LUI immediate width in bits (20)
#define LIMMSGN (LIMMSZ -1)
#define LIMMMSK ( (1 <<LIMMSZ)-1)
#define SEXT_LIMM(x) ((x &LIMMMSK) | (-BIT((x),LIMMSGN)<<LIMMSZ))
#define WDBYTSZ (WDSZ >> 3) // in units of #bytes
#define WDBYTMSK (WDBYTSZ-1)
// NOTE(review): (XLEN>>5)+2 yields 3,4,6 for XLEN 32,64,128, which matches neither
// "log2(XLEN)" nor "2,3,4" in the comment ((XLEN>>6)+2 would give 2,3,4) -- confirm intent
#define ALIGNSZ ((XLEN>>5)+2) // log2(XLEN): 2,3,4 for XLEN 32,64,128
#if XLEN>FLEN
#define SIGALIGN REGWIDTH // signature entries aligned to the wider of X/F register widths (bytes)
#else
#define SIGALIGN FREGWIDTH
#endif
#ifndef RVMODEL_MTVEC_ALIGN
#define MTVEC_ALIGN 6 // ensure that a trampoline is on a typical cacheline boundary, just in case
#else
#define MTVEC_ALIGN RVMODEL_MTVEC_ALIGN //Let the model defined value be used for required trap handler alignment based on implemented MTVEC
#endif
//==============================================================================
// this section has RV Arch Test Constants, mostly YAML based.
// It ensures they're defined & defaulted if necessary
//==============================================================================
// default: 16 architecturally-specified interrupt causes, low 4 bits select the cause
#ifndef NUM_SPECD_INTCAUSES
#define NUM_SPECD_INTCAUSES 16
#define INT_CAUSE_MSK ((1<<4)-1)
#endif
// default: 16 architecturally-specified exception causes, low 4 bits select the cause
#ifndef NUM_SPECD_EXCPTCAUSES
#define NUM_SPECD_EXCPTCAUSES 16
#define EXCPT_CAUSE_MSK ((1<<4)-1)
#endif
// default cbo.zero block size in bytes if the model doesn't specify one
#ifndef RVMODEL_CBZ_BLOCKSIZE
#define RVMODEL_CBZ_BLOCKSIZE 64
#endif
// default cache-management-operation block size in bytes if the model doesn't specify one
#ifndef RVMODEL_CMO_BLOCKSIZE
#define RVMODEL_CMO_BLOCKSIZE 64
#endif
//==========================================================================================
// By default, RVMODEL_FENCEI is defined as a nop for implementations that do not define ZIFENCE (i.e. do not support Zifencei).
// Implementations that support Zifencei may use the fence.i instruction.
// This only gets executed if xTVEC is not writable to point to the trap trampoline,
// and if it isn't writable, the model better have the zifencei extension implemented.
//==========================================================================================
#ifndef RVMODEL_FENCEI
#ifndef ZIFENCE
#define RVMODEL_FENCEI nop // Zifencei not supported: nothing to do
#else
#define RVMODEL_FENCEI fence.i // make sure ifetches get new code
#endif
#endif
#ifndef UNROLLSZ
#define UNROLLSZ 5 // log2 alignment used to pad the fixed-length LA sequence
#endif
// **Note** that this is different from the previous DATA_REL_TVAL_MASK! This is the OR of Code_Rel+Data_Rel
// if xTVAL is set to zero for some cause, then the corresponding bit in SET_REL_TVAL_MSK should be cleared
#ifndef SET_REL_TVAL_MSK
#define SET_REL_TVAL_MSK ((1<<CAUSE_MISALIGNED_FETCH | 1<<CAUSE_FETCH_ACCESS | 1<<CAUSE_BREAKPOINT | \
1<<CAUSE_MISALIGNED_LOAD | 1<<CAUSE_LOAD_ACCESS | 1<<CAUSE_MISALIGNED_STORE | 1<<CAUSE_STORE_ACCESS | \
1<<CAUSE_FETCH_PAGE_FAULT | 1<<CAUSE_LOAD_PAGE_FAULT | 1<<CAUSE_STORE_PAGE_FAULT) \
& 0xFFFFFFFF)
#endif
// causes whose xTVAL is an absolute (not test-relative) value
#ifndef SET_ABS_TVAL_MSK
#define SET_ABS_TVAL_MSK ((1<<CAUSE_ILLEGAL_INSTRUCTION) & 0xFFFFFFFF)
#endif
// the trapping op RVTEST_GOTO_MMODE uses to enter M-mode; must NOT be delegated
#ifndef GOTO_M_OP
#define GOTO_M_OP ecall
#endif
//this is pte entry permission bits for all permissions.
#define RVTEST_ALLPERMS ( PTE_G | PTE_U | PTE_X | PTE_W | PTE_R | PTE_V)
//this is pte entry permission bits for no permissions (V bit clear => not valid).
#define RVTEST_NOACC ( PTE_G | PTE_U )
//_ADDR_SZ_ is a global variable extracted from YAML; set a default if it isn't defined
// This should be the MAX(phy_addr_size, VADDR_SZ) from YAML,
// where VADDR_SZ is derived from SATP.mode at reset
#ifndef _ADDR_SZ_
#if XLEN==64
#define _ADDR_SZ_ 57
#else
#define _ADDR_SZ_ 32
#endif
#endif
// this is the position of the last level PPN in each root page table PTE
#define ROOT_PPN_LSB 10
#if XLEN==32
#define PPN_SZ 10
#define LVLS 2
#else
#define PPN_SZ 9
#define LVLS ((_ADDR_SZ_-4)/PPN_SZ)
#endif
// this defines a page of PTEs at top level (depending on _ADDR_SZ_) with named permissions
// for the largest size page and a common base (which is set to zero for identity mapping)
// Emits one 4KiB page worth of PTEs: PTE[i] maps superpage i at PGBASE with PERMS.
// NOTE(review): the final .endr line ends with ";\", so the macro body splices in the
// start of the next source line as well -- harmless today (it is a // comment) but fragile; confirm.
#define RVTEST_PTE_IDENT_MAP(PGBASE,LVLS,PERMS) ;\
.set ppn, 0 ;\
.rept (4096 >> REGWIDTH) ;\
.fill 1, REGWIDTH, (PGBASE | (ppn<<(10+(LVLS-1)*PPN_SZ)) | PERMS) ;\
.set ppn, (ppn+1) ;\
.endr ;\
// define a bunch of XLEN dependent constants
// SREG/LREG: XLEN-wide store/load mnemonics; LREGWU: zero-extending 32b load
#if XLEN==32
#define SREG sw
#define LREG lw
#define XLEN_WIDTH 5
#define LREGWU lw
#elif XLEN==64
#define SREG sd
#define LREG ld
#define XLEN_WIDTH 6
#define LREGWU lwu
#else
// XLEN==128; NOTE(review): no LREGWU is defined for this case -- confirm unused
#define SREG sq
#define LREG lq
#define XLEN_WIDTH 7
#endif
// FLEN-wide FP load/store mnemonics and FP register width in bytes
#if FLEN==32
#define FLREG flw
#define FSREG fsw
#define FREGWIDTH 4
#elif FLEN==64
#define FLREG fld
#define FSREG fsd
#define FREGWIDTH 8
#elif FLEN==128
#define FLREG flq
#define FSREG fsq
#define FREGWIDTH 16
#endif
// Zfinx/Zdinx: FP values live in x-registers, so use integer load/store
// NOTE(review): these re-#define FLREG/FSREG/FREGWIDTH/FLEN without #undef,
// which redefines the values set above when both are configured -- confirm intended
#if ZFINX==1
#define FLREG ld
#define FSREG sd
#define FREGWIDTH 8
#define FLEN 64
#if XLEN==64
#define SIGALIGN 8
#else
#define SIGALIGN 4
#endif
#elif ZDINX==1
#define FLREG LREG
#define FSREG SREG
#define FREGWIDTH 8
#define FLEN 64
#endif
// CANARY: recognizable filler written between signature entries to detect overwrites
#if SIGALIGN==8
#define CANARY \
.dword 0x6F5CA309E7D4B281
#else
#define CANARY \
.word 0x6F5CA309
#endif
//---------------------------mode encoding definitions-----------------------------
// mode signature codes written into the trap signature to identify the trapped mode
.set MMODE_SIG, 3
.set SMODE_SIG, 1
.set VMODE_SIG, 2
/* these macros need to be defined because mode is uppercase in mode specific macros */
/* note that vs mode uses smode return */
#define GVA_LSB 6 //bit pos of LSB of the hstatus.GVA field
#define MPP_LSB 11 //bit pos of LSB of the mstatus.MPP field
#define MPRV_LSB 17 //bit pos of LSB of the mstatus.MPRV field
#define MPV_LSB 7 // bit pos of prev vmod mstatush.MPV in either mstatush or mstatus upper
#define MPP_SMODE (1<<MPP_LSB)
//define sizes of the per-mode trap-handler save area pieces
#define actual_tramp_sz ((XLEN + 3* NUM_SPECD_INTCAUSES + 5) * 4) // 5 is added ops before common entry pt
#define tramp_sz ((actual_tramp_sz+4) & -8) // round up to keep alignment for sv area alloc
#define ptr_sv_sz (16*8) // 16 dblword pointer slots (see offsets below)
#define reg_sv_sz ( 8*REGWIDTH) // 8 XLEN-wide temp register save slots
#define sv_area_sz (tramp_sz + ptr_sv_sz + reg_sv_sz) // force dblword alignment
#define int_hndlr_tblsz (XLEN*2*WDBYTSZ)
/*
//#define sv_area_sz (Msv_area_end-Mtramptbl_sv) //sv_area start with aligned tramp_tbl
//#define tramp_sz (((common_Mentry-Mtrampoline)+4)& -8) // #ops from Mend..Mentry, forced to dblword size
*/
//define fixed offsets into the save area (all relative to the area base)
#define tramp_sv_off ( 0*8) // (Mtramptbl_sv -Mtrapreg_sv) algned to dblwd
#define code_bgn_off (tramp_sz+ 0*8) // (Mcode_bgn_ptr -Mtrapreg_sv)
#define code_seg_siz (tramp_sz+ 1*8) // (Mcode_seg_siz -Mtrapreg_sv)
#define data_bgn_off (tramp_sz+ 2*8) // (Mdata_bgn_ptr -Mtrapreg_sv) <--update on mapping chg
#define data_seg_siz (tramp_sz+ 3*8) // (Mdata_seg_siz -Mtrapreg_sv)
#define sig_bgn_off (tramp_sz+ 4*8) // ( Msig_bgn_ptr -Mtrapreg_sv) <--update on mapping chg
#define sig_seg_siz (tramp_sz+ 5*8) // ( Msig_seg_siz -Mtrapreg_sv)
#define vmem_bgn_off (tramp_sz+ 6*8) // (Mvmem_bgn_ptr -Mtrapreg_sv) <--update on mapping chg
#define vmem_seg_siz (tramp_sz+ 7*8) // (Mvmem_seg_siz -Mtrapreg_sv)
// NOTE(review): unlike its neighbors, mpp_sv_off adds sv_area_sz, so it points past
// this mode's save area (same 8*8 slot of the NEXT area?) -- confirm intent
#define mpp_sv_off (sv_area_sz+tramp_sz+8*8) // (Strap_sig -Mtrapreg_sv)
#define trapsig_ptr_off (tramp_sz+ 8*8) // (Mtrap_sig -Mtrapreg_sv)
#define xsatp_sv_off (tramp_sz+ 9*8) // (Msatp_sv -Mtrapreg_sv)
#define trampend_off (tramp_sz+10*8) // (Mtrampend_sv -Mtrapreg_sv)
#define tentry_addr (tramp_sz+11*8) // (Mtentry_sv -Mtrapreg_sv) <--update on mapping chg
#define xedeleg_sv_off (tramp_sz+12*8) // (Medeleg_sv -Mtrapreg_sv)
#define xtvec_new_off (tramp_sz+13*8) // (tvec_new -Mtrapreg_sv)
#define xtvec_sav_off (tramp_sz+14*8) // (tvec_save -Mtrapreg_sv)
#define xscr_save_off (tramp_sz+15*8) // (scratch_save -Mtrapreg_sv)
#define trap_sv_off (tramp_sz+16*8) // (trapreg_sv -Mtrapreg_sv) 8 registers long
//==============================================================================
// this section has general test helper macros, required, optional, or just useful
//==============================================================================
// argument-selection helpers: _ARGn expands to its nth argument
#define _ARG5(_1ST,_2ND, _3RD,_4TH,_5TH,...) _5TH
#define _ARG4(_1ST,_2ND, _3RD,_4TH,...) _4TH
#define _ARG3(_1ST,_2ND, _3RD, ...) _3RD
#define _ARG2(_1ST,_2ND, ...) _2ND
#define _ARG1(_1ST,...) _1ST
// NARG(...) counts its arguments (0..4) using __VA_OPT__ to handle the empty case
#define NARG(...) _ARG5(__VA_OPT__(__VA_ARGS__,)4,3,2,1,0)
// documentation-only marker consumed by the framework; expands to nothing
#define RVTEST_CASE(_PNAME,_DSTR,...)
//-----------------------------------------------------------------------
//Fixed length la, li macros; # of ops is ADDR_SZ dependent, not data dependent
//-----------------------------------------------------------------------
// this generates a constants using the standard addi or lui/addi sequences
// but also handles cases that are contiguous bit masks in any position,
// and also constants handled with the addi/lui/addi but are shifted left
/**** fixed length LI macro ****/
#if (XLEN<64)
// RV32 LI: load constant imm into reg using li, or lui(+addi); the addi is
// skipped when the low 12 bits are zero, so length depends only on the constant class
#define LI(reg, imm) ;\
.set immx, (imm & MASK) /* trim to XLEN (noeffect on RV64) */ ;\
.set absimm, ((immx^(-BIT(immx,XLEN-1)))&MASK) /* cvt to posnum to simplify code */ ;\
.set cry, (BIT(imm, IMMSGN)) ;\
.set imm12, (SEXT_IMM(immx)) ;\
.if ((absimm>>IMMSGN)==0) /* fits 12b signed imm (properly sgnext)? */ ;\
li reg, imm12 /* yes, <= 12bit, will be simple li */ ;\
.else ;\
lui reg, (((immx>>IMMSZ)+cry) & LIMMMSK) /* <= 32b, use lui/addi */ ;\
.if ((imm&IMMMSK)!=0) /* but skip this if lower bits are zero */ ;\
addi reg, reg, imm12 ;\
.endif ;\
.endif
#else
// RV64 LI: load constant imm into reg. Classifies the constant at assembly time
// (12b imm, 32b lui/addi, contiguous/single-bit masks and their complements,
// shifted 12b/32b values) and emits a short sequence for each class, falling
// back to a fixed 8-op lui/addi/slli chain for arbitrary 64-bit values.
// rvc is disabled so the emitted length depends only on the constant class.
#define LI(reg, imm) ;\
.option push ;\
.option norvc ;\
.set immx, (imm & MASK) /* trim to XLEN (noeffect on RV64) */ ;\
/***************** used in loop that detects bitmasks */ ;\
.set edge1, 1 /* 1st "1" bit pos scanning r to l */ ;\
.set edge2, 0 /* 1st "0" bit pos scanning r to l */ ;\
.set fnd1, -1 /* found 1st "1" bit pos scanning r to l */ ;\
.set fnd2, -1 /* found 1st "0" bit pos scanning r to l */ ;\
.set imme, ((immx^(-BIT(immx,0 )))&MASK) /* cvt to even, cvt back at end */ ;\
.set pos, 0 ;\
/***************** used in code that checks for 32b immediates */ ;\
.set absimm, ((immx^(-BIT(immx,XLEN-1)))&MASK) /* cvt to posnum to simplify code */ ;\
.set cry, (BIT(immx, IMMSGN)) ;\
.set imm12, (SEXT_IMM(immx)) ;\
/***************** used in code that gnerates bitmasks */ ;\
.set even, (1-BIT(imm, 0)) /* imm has at least 1 trailing zero */ ;\
.set cryh, (BIT(immx, IMMSGN+32)) ;\
/******** loop finding rising/falling edge fm LSB-MSB given even operand ****/ ;\
.rept XLEN ;\
.if (fnd1<0) /* looking for first edge? */ ;\
.if (BIT(imme,pos)==1) /* look for falling edge[pos] */ ;\
.set edge1,pos /* fnd falling edge, don’t chk for more */ ;\
.set fnd1,0 ;\
.endif ;\
.elseif (fnd2<0) /* looking for second edge? */ ;\
.if (BIT(imme,pos)==0) /* yes, found rising edge[pos]? */ ;\
.set edge2, pos /* fnd rising edge, don’t chk for more */ ;\
.set fnd2,0 ;\
.endif ;\
.endif ;\
.set pos, pos+1 /* keep looking (even if already found) */ ;\
.endr ;\
/***************** used in code that generates shifted 32b values */ ;\
.set immxsh, (immx>>edge1) /* *sh variables only used if positive */ ;\
.set imm12sh,(SEXT_IMM(immxsh))/* look @1st 12b of shifted imm val */ ;\
.set crysh, (BIT(immxsh, IMMSGN)) ;\
.set absimmsh, immxsh /* pos, no inversion needed, just shift */ ;\
/*******does it fit into std li or lui+li sequence****************************/ ;\
.if ((absimm>>IMMSGN)==0) /* fits 12b signed imm (properly sgnext)? */ ;\
li reg, imm12 /* yes, <= 12bit, will be simple li */ ;\
.elseif ((absimm+ (cry << IMMSZ) >> WDSGN)==0)/*fits 32b sgnimm?(w/ sgnext)?*/;\
lui reg, (((immx>>IMMSZ)+cry) & LIMMMSK) /* <= 32b, use lui/addi */ ;\
.if ((imm&IMMMSK)!=0) /* but skip this if lower bits are zero */ ;\
addi reg, reg, imm12 ;\
.endif ;\
/*********** look for 0->1->0 masks, or inverse sgl/multbit *************/ ;\
.elseif ( even && (fnd2<0)) /* only rising edge, so 111000 */ ;\
li reg, -1 ;\
slli reg, reg, edge1 /* make 111s --> 000s mask */ ;\
.elseif (!even && (fnd2<0)) /* only falling edge, so 000111 */ ;\
li reg, -1 ;\
srli reg, reg, XLEN-edge1 /* make 000s --> 111s mask */ ;\
.elseif (imme == (1<<edge1)) /* check for single bit case */ ;\
li reg, 1 ;\
slli reg, reg, edge1 /* make 0001000 sgl bit mask */ ;\
.if (!even) ;\
xori reg, reg, -1 /* orig odd, cvt to 1110111 mask */ ;\
.endif ;\
.elseif (imme == ((1<<edge2) - (1<<edge1))) /* chk for multibit case */ ;\
li reg, -1 ;\
srli reg, reg, XLEN-(edge2-edge1) /* make multibit 1s mask */ ;\
slli reg, reg, edge1 /* and put it into position */ ;\
.if (!even) ;\
xori reg, reg, -1 /* orig odd, cvt to 1110111 mask */ ;\
.endif ;\
/************** look for 12b or 32b imms with trailing zeroes ***********/ ;\
.elseif ((immx==imme)&&((absimmsh>>IMMSGN)==0))/* fits 12b after shift? */ ;\
li reg, imm12sh /* <= 12bit, will be simple li */ ;\
slli reg, reg, edge1 /* add trailing zeros */ ;\
.elseif ((immx==imme)&&(((absimmsh>>WDSGN)+crysh)==0)) /* fits 32 <<shft? */ ;\
lui reg, ((immxsh>>IMMSZ)+crysh)&LIMMMSK /* <=32b, use lui/addi */ ;\
.if ((imm12sh&IMMMSK)!=0) /* but skip this if low bits ==0 */ ;\
addi reg, reg, imm12sh ;\
.endif ;\
slli reg, reg, edge1 /* add trailing zeros */ ;\
.else /* give up, use fixed 8op sequence*/ ;\
/******* TBD add sp case of zero short imms, rmv add/merge shifts ******/ ;\
lui reg, ((immx>>(XLEN-LIMMSZ))+cryh)&LIMMMSK /* 1st 20b (63:44) */ ;\
addi reg, reg, SEXT_IMM(immx>>32) /* nxt 12b (43:32) */ ;\
slli reg, reg, 11 /* following are <12b, don't need SEXT */ ;\
addi reg, reg, (immx>>21) & (IMMMSK>>1) /* nxt 11b (31:21) */ ;\
slli reg, reg, 11 /* mk room for 11b */ ;\
addi reg, reg, (immx>>10) & (IMMMSK>>1) /* nxt 11b (20:10) */ ;\
slli reg, reg, 10 /* mk room for 10b */ ;\
.if ((imm&(IMMMSK>>2))!=0) /* but skip this if lower bits are zero */ ;\
addi reg, reg, (immx) & (IMMMSK>>2) /* lst 10b (09:00) */ ;\
.endif ;\
.if (XLEN==32) ;\
.warning "Should never get here for RV32" ;\
.endif ;\
.endif ;\
.option pop
#endif
/**** fixed length LA macro; alignment and rvc/norvc unknown before execution ****/
// Loads the address of val into reg; the .align padding before and after (with
// rvc disabled around the la) makes the emitted size independent of the symbol.
// NOTE(review): .ifnc is a literal, case-sensitive string comparison, so only a
// destination spelled exactly "X0" is skipped; lowercase "x0" still generates
// code, despite the changelog note at the top of this file -- confirm.
#define LA(reg,val) ;\
.ifnc(reg, X0) ;\
.option push ;\
.option rvc ;\
.align UNROLLSZ ;\
.option norvc ;\
la reg,val ;\
.align UNROLLSZ ;\
.option pop ;\
.endif
// ADDI(dst, src, imm): add an arbitrarily large immediate to src, leaving the
// sum in dst. Emits a single addi when imm fits the signed 12-bit range;
// otherwise materializes imm into dst with LI and adds the two registers.
// NOTE: in the large-immediate path dst must differ from src.
#define ADDI(dst, src, imm) /* helper*/ ;\
.if ((imm<=2047) & (imm>=-2048)) /* FIX: addi imm range is -2048..+2047, not +2048 */ ;\
addi dst, src, imm ;\
.else ;\
LI( dst, imm) ;\
add dst, src, dst /* FIX: was "addi" with a register as 3rd operand */ ;\
.endif
/*****************************************************************/
/**** initialize regs, just to make sure you catch any errors ****/
/*****************************************************************/
// DBLSHIFT7: derive a new scramble value: dstreg = (oldreg>>7) | (oldreg>>(XLEN-7)).
// Clobbers x15 as scratch. Used only to generate distinct GPR init patterns.
// NOTE(review): both shifts are srli, so this is NOT a rotate (a rotate would
// slli the second term); presumed intentional scrambling -- confirm.
.macro DBLSHIFT7 dstreg, oldreg
srli \dstreg\(), \oldreg\(), 7
srli x15 , \oldreg\(), XLEN-7
or \dstreg\(), \dstreg\(), x15
.endm
/* init regs, to ensure you catch any errors */
// RVTEST_INIT_GPRS: fill the GPRs with distinct, recognizable patterns (seeded
// by two LI constants, propagated by DBLSHIFT7) so use of an uninitialized or
// wrongly-written register shows up in the signature. x16-x30 are skipped for
// RV32E builds (RVTEST_E). NOTE(review): x14 and the x15 seed are written only
// when RVTEST_ENAB_INSTRET_CNT is defined (x15 is otherwise left holding
// DBLSHIFT7 scratch) -- confirm that is intended.
.macro RVTEST_INIT_GPRS
#ifndef RVTEST_E
LI (x16, (0x7D5BFDDB7D5BFDDB & MASK))
DBLSHIFT7 x17, x16
DBLSHIFT7 x18, x17
DBLSHIFT7 x19, x18
DBLSHIFT7 x20, x19
DBLSHIFT7 x21, x20
DBLSHIFT7 x22, x21
DBLSHIFT7 x23, x22
DBLSHIFT7 x24, x23
DBLSHIFT7 x25, x24
DBLSHIFT7 x26, x25
DBLSHIFT7 x27, x26
DBLSHIFT7 x28, x27
DBLSHIFT7 x29, x28
DBLSHIFT7 x30, x29
#endif
LI (x1, (0xFEEDBEADFEEDBEAD & MASK))
DBLSHIFT7 x2, x1
DBLSHIFT7 x3, x2
DBLSHIFT7 x4, x3
DBLSHIFT7 x5, x4
DBLSHIFT7 x6, x5
DBLSHIFT7 x7, x6
DBLSHIFT7 x8, x7
DBLSHIFT7 x9, x8
DBLSHIFT7 x10, x9
DBLSHIFT7 x11, x10
DBLSHIFT7 x12, x11
DBLSHIFT7 x13, x12
#ifdef RVTEST_ENAB_INSTRET_CNT
csrr x14, CSR_MSCRATCH
csrr x15, CSR_MINSTRET
SREG x15, tramp_sz+4*8(x14) // this replaces initial canary val w/ instret counter val
DBLSHIFT7 x14, x13
LI (x15, (0xFAB7FBB6FAB7FBB6 & MASK))
#endif
.endm
/******************************************************************************/
/**** this is a helper macro that conditionally instantiates the macros ****/
/**** PROLOG/HANDLER/EPILOG/SAVEAREA depending on test type & mode support ****/
/******************************************************************************/
// Instantiates MACRO_NAME once per privilege mode the test supports, in order
// M, S, V. Note the nesting: S-mode code requires rvtest_mtrap_routine to also
// be defined, and V-mode additionally requires rvtest_strap_routine.
.macro INSTANTIATE_MODE_MACRO MACRO_NAME
#ifdef rvtest_mtrap_routine
\MACRO_NAME M // actual m-mode prolog/epilog/handler code
#ifdef rvtest_strap_routine
\MACRO_NAME S // actual s-mode prolog/epilog/handler code
#ifdef rvtest_vtrap_routine
\MACRO_NAME V // actual v-mode prolog/epilog/handler code
#endif
#endif
#endif
.endm
/**************************************************************************/
/**** this is a helper macro defaulting the int macro if its undefined ****/
/**** It builds the macro name from arguments prefix, mode, and type ****/
/**** The macro names are RV_MODEL_SET_[M/S/V][SW/TMR,EXT] ****/
/**** and RV_MODEL_CLR_[M/S/V][SW] ****/
/**************************************************************************/
// DFLT_INT_MACRO: intended to default an interrupt set/clear macro to
// "j cleanup_epilogs" (ending the test) when the target does not define it.
// NOTE(review): the .set on the first line always defines MACRO_NAME_, so the
// .ifndef that follows can never take the branch; additionally the #define
// inside this .macro is expanded by the C preprocessor at file scope (see the
// warning near the top of this file). Looks non-functional as written -- confirm.
.macro DFLT_INT_MACRO MACRO_NAME
.set MACRO_NAME_, \MACRO_NAME
.ifndef MACRO_NAME_
.warning "MACRO_NAME_ is not defined by target. Executing this will end test."
#define MACRO_NAME_ j cleanup_epilogs
.endif
.endm
/******************************************************************************/
/**** These macros enable parameterization of trap handlers for each mode ****/
/******************************************************************************/
// Map the generic CSR_X* aliases onto the VS/H CSR numbers (for access from a
// mode where the vs* CSRs are addressed directly, e.g. M or HS).
.macro _XCSR_RENAME_V
.set CSR_XSTATUS, CSR_VSSTATUS /****FIXME? is the right substitution? ****/
.set CSR_XEDELEG, CSR_HEDELEG /****FIXME? is the right substitution? ****/
.set CSR_XIE, CSR_HIE
.set CSR_XIP, CSR_HIP
.set CSR_XCAUSE, CSR_VSCAUSE
.set CSR_XEPC, CSR_VSEPC
.set CSR_XSATP, CSR_VSATP
.set CSR_XSCRATCH,CSR_VSSCRATCH
.set CSR_XTVAL, CSR_VSTVAL
.set CSR_XTVEC, CSR_VSTVEC
.endm
// Map the generic CSR_X* aliases onto the S-mode CSR numbers.
.macro _XCSR_RENAME_S
.set CSR_XSTATUS, CSR_SSTATUS
.set CSR_XEDELEG, CSR_SEDELEG
.set CSR_XIE, CSR_SIE
.set CSR_XIP, CSR_SIP
.set CSR_XCAUSE, CSR_SCAUSE
.set CSR_XEPC, CSR_SEPC
.set CSR_XSATP, CSR_SATP
.set CSR_XSCRATCH,CSR_SSCRATCH
.set CSR_XTVAL, CSR_STVAL
.set CSR_XTVEC, CSR_STVEC
.endm
// Map the generic CSR_X* aliases onto the M-mode CSR numbers.
// (CSR_XSATP aliases CSR_SATP here, as written below.)
.macro _XCSR_RENAME_M
.set CSR_XSTATUS, CSR_MSTATUS
.set CSR_XEDELEG, CSR_MEDELEG
.set CSR_XIE, CSR_MIE
.set CSR_XIP, CSR_MIP
.set CSR_XCAUSE, CSR_MCAUSE
.set CSR_XEPC, CSR_MEPC
.set CSR_XSATP, CSR_SATP
.set CSR_XSCRATCH,CSR_MSCRATCH
.set CSR_XTVAL, CSR_MTVAL
.set CSR_XTVEC, CSR_MTVEC
.endm
/******************************************************************************/
/**** this is a helper macro that creates CSR aliases so code that ****/
/**** accesses CSRs when V=1 in different modes can share the code ****/
/******************************************************************************/
.macro XCSR_RENAME __MODE__ // enable CSR names to be parameterized, V,S merged
// M selects the M-mode CSRs; S and V both select the S-mode names, because code
// running with V=1 reaches the vs* CSRs through the s* CSR addresses.
.ifc \__MODE__ , M
_XCSR_RENAME_M
.endif
.ifc \__MODE__ , S
_XCSR_RENAME_S
.endif
.ifc \__MODE__ , V
_XCSR_RENAME_S
.endif
.endm
/******************************************************************************/
/**** this is a helper macro that creates CSR aliases so code that ****/
/**** accesses CSRs when V=1 in different modes can share the code ****/
/**** this version treats Vmodes separately as opposed to XCSR_RENAME ****/
/**** this is used when the using it is run from Mmode ****/
/******************************************************************************/
.macro XCSR_VRENAME __MODE__ // enable CSR names to be parameterized, V,S separate
// Unlike XCSR_RENAME, V selects the vs* CSR numbers directly -- used when the
// renamed code is executed from M-mode rather than with V=1.
.ifc \__MODE__ , M
_XCSR_RENAME_M
.endif
.ifc \__MODE__ , S
_XCSR_RENAME_S
.endif
.ifc \__MODE__ , V
_XCSR_RENAME_V
.endif
.endm
////////////////////////////////////////////////////////////////////////////////////////
//**** This is a helper macro that saves GPRs. Normally used only inside CODE_END ****//
//**** Note: this needs a temp scratch register, & there isn't anything that ****//
//**** will work, so we always trash some register, determined by macro param ****//
//**** NOTE: Only be use for debug! Xregs containing addresses won't be relocated ****//
////////////////////////////////////////////////////////////////////////////////////////
// RVTEST_SAVE_GPRS(_BR, _LBL, [mask]): dump GPRs x1..x31 into the signature at
// label _LBL, using _BR as the base register (so _BR's own value is not saved
// meaningfully). The optional 3rd argument is a bitmask selecting which regs
// to store (bit N => xN); it defaults to -1 (save all). x16-x31 are skipped
// for RV32E builds (RVTEST_E). Debug aid only: saved xregs that hold addresses
// are not relocated.
// NOTE(review): the #ifndef/#endif lines below are inside the #define's
// continuation, so they are not processed as preprocessor directives at the
// macro's expansion site; .option pop also sits inside that region -- confirm
// the intended behavior for RVTEST_E builds.
#define RVTEST_SAVE_GPRS(_BR, _LBL, ...) ;\
.option push ;\
.option norvc ;\
.set __SV_MASK__, -1 /* default to save all */ ;\
.if NARG(__VA_ARGS__) == 1 ;\
.set __SV_MASK__, _ARG1(__VA_OPT__(__VA_ARGS__,0)) ;\
.endif ;\
.set offset, 0 ;\
LA(_BR, _LBL) ;\
.if (__SV_MASK__ & (0x2)) == 0x2 ;\
RVTEST_SIGUPD(_BR, x1) ;\
.endif ;\
.if (__SV_MASK__ & (0x4)) == 0x4 ;\
RVTEST_SIGUPD(_BR, x2) ;\
.endif ;\
.if (__SV_MASK__ & (0x8)) == 0x8 ;\
RVTEST_SIGUPD(_BR, x3) ;\
.endif ;\
.if (__SV_MASK__ & (0x10)) == 0x10 ;\
RVTEST_SIGUPD(_BR, x4) ;\
.endif ;\
.if (__SV_MASK__ & (0x20)) == 0x20 ;\
RVTEST_SIGUPD(_BR, x5) ;\
.endif ;\
.if (__SV_MASK__ & (0x40)) == 0x40 ;\
RVTEST_SIGUPD(_BR, x6) ;\
.endif ;\
.if (__SV_MASK__ & (0x80)) == 0x80 ;\
RVTEST_SIGUPD(_BR, x7) ;\
.endif ;\
.if (__SV_MASK__ & (0x100)) == 0x100 ;\
RVTEST_SIGUPD(_BR, x8) ;\
.endif ;\
.if (__SV_MASK__ & (0x200)) == 0x200 ;\
RVTEST_SIGUPD(_BR, x9) ;\
.endif ;\
.if (__SV_MASK__ & (0x400)) == 0x400 ;\
RVTEST_SIGUPD(_BR, x10) ;\
.endif ;\
.if (__SV_MASK__ & (0x800)) == 0x800 ;\
RVTEST_SIGUPD(_BR, x11) ;\
.endif ;\
.if (__SV_MASK__ & (0x1000)) == 0x1000 ;\
RVTEST_SIGUPD(_BR, x12) ;\
.endif ;\
.if (__SV_MASK__ & (0x2000)) == 0x2000 ;\
RVTEST_SIGUPD(_BR, x13) ;\
.endif ;\
.if (__SV_MASK__ & (0x4000)) == 0x4000 ;\
RVTEST_SIGUPD(_BR, x14) ;\
.endif ;\
.if (__SV_MASK__ & (0x8000)) == 0x8000 ;\
RVTEST_SIGUPD(_BR, x15) ;\
.endif ;\
#ifndef RVTEST_E ;\
.if (__SV_MASK__ & (0x10000)) == 0x10000 ;\
RVTEST_SIGUPD(_BR, x16) ;\
.endif ;\
.if (__SV_MASK__ & (0x20000)) == 0x20000 ;\
RVTEST_SIGUPD(_BR, x17) ;\
.endif ;\
.if (__SV_MASK__ & (0x40000)) == 0x40000 ;\
RVTEST_SIGUPD(_BR, x18) ;\
.endif ;\
.if (__SV_MASK__ & (0x80000)) == 0x80000 ;\
RVTEST_SIGUPD(_BR, x19) ;\
.endif ;\
.if (__SV_MASK__ & (0x100000)) == 0x100000 ;\
RVTEST_SIGUPD(_BR, x20) ;\
.endif ;\
.if (__SV_MASK__ & (0x200000)) == 0x200000 ;\
RVTEST_SIGUPD(_BR, x21) ;\
.endif ;\
.if (__SV_MASK__ & (0x400000)) == 0x400000 ;\
RVTEST_SIGUPD(_BR, x22) ;\
.endif ;\
.if (__SV_MASK__ & (0x800000)) == 0x800000 ;\
RVTEST_SIGUPD(_BR, x23) ;\
.endif ;\
.if (__SV_MASK__ & (0x1000000)) == 0x1000000 ;\
RVTEST_SIGUPD(_BR, x24) ;\
.endif ;\
.if (__SV_MASK__ & (0x2000000)) == 0x2000000 ;\
RVTEST_SIGUPD(_BR, x25) ;\
.endif ;\
.if (__SV_MASK__ & (0x4000000)) == 0x4000000 ;\
RVTEST_SIGUPD(_BR, x26) ;\
.endif ;\
.if (__SV_MASK__ & (0x8000000)) == 0x8000000 ;\
RVTEST_SIGUPD(_BR, x27) ;\
.endif ;\
.if (__SV_MASK__ & (0x10000000)) == 0x10000000 ;\
RVTEST_SIGUPD(_BR, x28) ;\
.endif ;\
.if (__SV_MASK__ & (0x20000000)) == 0x20000000 ;\
RVTEST_SIGUPD(_BR, x29) ;\
.endif ;\
.if (__SV_MASK__ & (0x40000000)) == 0x40000000 ;\
RVTEST_SIGUPD(_BR, x30) ;\
.endif ;\
.if (__SV_MASK__ & (0x80000000)) == 0x80000000 ;\
RVTEST_SIGUPD(_BR, x31) ;\
.endif ;\
.option pop ;\
#endif
/***********************************************************************************/
/**** At end of test, this code is entered. It sets a register x2 to 0 and by ****/
/**** default executes an ecall. The handler checks if the cause of the trap ****/
/**** was ecall, w/ x2=0, and diverts to a special rtn_fm_mmode handler. That ****/
/**** code determines the caller's mode, uses it to select its CODE_BEGIN, and ****/
/**** uses that to calculate its offset from Mmode's CODE_BEGIN, adjusts MEPC by that amt ****/
/**** to convert it to an Mmode address, restores saved regs, and branches to ****/
/**** the relocated addr+4, immediately following the ECALL, but now in Mmode ****/
/**** **NOTE**: this destroys T2 and clears x2 (AKA sp) ****/
/**** **NOTE**: this works from any mode but MUST not be used if ****/
/**** medeleg[<GOTO_M_OP_cause>]==1 to prevent infinite delegation loops. ****/
/**** **NOTE: tests that set medeleg[GOTO_M_OP_cause] must replace GOTO_M_OP ****/
/**** with an op that causes a different exception cause that isn't delegated. ****/
/***********************************************************************************/
.macro RVTEST_GOTO_MMODE
/**** Return the hart to M-mode from whatever mode it currently runs in. ****/
/**** Executes GOTO_M_OP (default: ecall) with x2==0; the trap handler   ****/
/**** recognizes that signature and resumes at the following op, now in  ****/
/**** M-mode (see the banner above: clobbers x2/sp and T2).              ****/
.option push
.option norvc /* fixed 4-byte ops so the handler's "+4 past the ecall" return math holds */
#ifdef rvtest_mtrap_routine /**** this can be empty if no Umode ****/
li x2, 0 /* Ecall w/x2=0 is handled specially to rtn here */
// Note that if illegal op trap is delegated, this may infinite loop
// The solution is either for test to disable delegation, or to
// redefine the GOTO_M_OP to be an op that will trap to mmode
GOTO_M_OP /* ECALL: traps always, but returns immediately to */
/* the next op if x2=0, else handles trap normally */
#endif
.option pop
.endm
/**** This is a helper macro that causes harts to transition from ****/
/**** M-mode to a lower priv mode at the instruction that follows ****/
/**** the macro invocation. Legal params are VS,HS,VU,HU,S,U. ****/
/**** The S,U variations leave V unchanged. Clobbers T1, T4, and sp. ****/
/**** NOTE: this MUST be executed in M-mode. Precede with GOTO_MMODE ****/
/**** FIXME - SATP & VSATP must point to the identity map page table ****/
/* Privilege-mode selectors for RVTEST_GOTO_LOWER_MODE. Bit0 picks S (1) */
/* vs U (0) privilege; the V* encodings set MSTATUS.MPV, the H* ones     */
/* clear it, and plain S/U leave V untouched (per the macro's branches). */
#define HSmode 0x9 /* S-priv, force MPV=0 */
#define HUmode 0x8 /* U-priv, force MPV=0 */
#define VUmode 0x4 /* U-priv, set MPV=1 */
#define VSmode 0x5 /* S-priv, set MPV=1 */
#define Smode 0x1 /* S-priv, V unchanged */
#define Umode 0x0 /* U-priv, V unchanged */
.macro RVTEST_GOTO_LOWER_MODE LMODE
/**** Drop from M-mode into \LMODE (one of VS,HS,VU,HU,S,U selectors).   ****/
/**** Execution resumes at the instruction after the macro, relocated    ****/
/**** into \LMODE's copy of the code. MUST be entered in M-mode.         ****/
/**** Clobbers T1, T4, and sp (reloaded from MSCRATCH).                  ****/
.option push
.option norvc
// first, clear MSTATUS.PP (and .MPV if it will be changed)
// then set them to the values that represent the lower mode
#if (XLEN==32)
.if ((\LMODE\()==VUmode) | (\LMODE\()==VSmode))
csrsi CSR_MSTATUS, MSTATUS_MPV /* set V */
.elseif ((\LMODE\()==HUmode) | (\LMODE\()==HSmode))
csrci CSR_MSTATUS, MSTATUS_MPV /* clr V */
.endif /* lv V unchged for S or U */
LI( T4, MSTATUS_MPP)
csrc CSR_MSTATUS, T4 /* clr PP always */
.if ((\LMODE\()==VSmode) || (\LMODE\()==HSmode) || (\LMODE\()==Smode))
LI( T4, MPP_SMODE) /* val for Smode */
csrs CSR_MSTATUS, T4 /* set in PP */
.endif
// do the same if XLEN=64
#else /* XLEN=64, maybe 128? FIXME for 128 */
.if ((\LMODE\()==Smode) || (\LMODE\()==Umode)) /* lv V unchanged here */
LI( T4, MSTATUS_MPP) /* but always clear PP */
.else
LI( T4, (MSTATUS_MPP | MSTATUS_MPV)) /* clr V and P */
.endif
csrc CSR_MSTATUS, T4 /* clr PP to umode & maybe Vmode */
.if (!((\LMODE\()==HUmode) || (\LMODE\()==Umode))) /* lv pp unchged, v=0 or unchged */
.if (\LMODE\()==VSmode)
LI( T4, (MPP_SMODE | MSTATUS_MPV)) /* val for pp & v */
.elseif ((\LMODE\()==HSmode) || (\LMODE\()==Smode))
LI( T4, (MPP_SMODE)) /* val for pp only */
.else /* only VU left; set MPV only */
li T4, 1 /* optimize for single bit */
slli T4, T4, 32+MPV_LSB /* val for v only */
.endif
csrs CSR_MSTATUS, T4 /* set correct mode and Vbit */
.endif
#endif
csrr sp, CSR_MSCRATCH /* ensure sp points to Mmode data area */
/**** mstatus MPV and PP now set up to desired mode ****/
/**** set MEPC to mret+4; requires relocating the pc ****/
/* BUGFIX: was ".if (\LMODE\() == Vmode)" but Vmode is never #defined;   */
/* the virtualized modes (VS/VU) are the ones two save areas up (M<-S<-V)*/
.if ((\LMODE\() == VSmode) || (\LMODE\() == VUmode)) // get trapsig_ptr & init val up 2 save areas (M<-S<-V)
LREG T1, code_bgn_off + 2*sv_area_sz(sp)
.elseif (\LMODE\() == Smode || \LMODE\() == Umode) // get trapsig_ptr & init val up 1 save areas (M<-S)
LREG T1, code_bgn_off + 1*sv_area_sz(sp)
.else // get trapsig ptr & init val for this Mmode, (M)
// NOTE(review): HSmode/HUmode fall through to here (M-mode's own area, offset 0) — confirm intended
LREG T1, code_bgn_off + 0*sv_area_sz(sp)
.endif
LREG T4, code_bgn_off(sp)
sub T1, T1,T4 /* calc addr delta between this mode (M) and lower mode code */
addi T1, T1, 4*WDBYTSZ /* bias by # ops after auipc (auipc,add,csrrw,mret); NOTE(review): assumes WDBYTSZ matches the 4-byte insn size — verify on RV64 */
auipc T4, 0
add T4, T4, T1 /* calc addr after mret in LMODE's VM */
csrrw T4, CSR_MEPC, T4 /* set rtn addr to mret+4 in LMODE's VM */
mret /* transition to desired mode */
.option pop
.endm // end of RVTEST_GOTO_LOWER_MODE
//==============================================================================
// Helper macro to set defaults for undefined interrupt set/clear
// macros. This is used to populate the interrupt vector table.
// These are only used during interrupt testing, so it is safe to
// define them as empty macros if and only if that particular interrupt
// isn't being tested
//==============================================================================
//****************************************************************
// Default for any RVMODEL_SET_*/RVMODEL_CLR_* interrupt macro the
// model did not define: jump to the common epilog, so reaching an
// untested interrupt path simply ends the test.
#define RVTEST_DFLT_INT_HNDLR j cleanup_epilogs
//Mmode interrupts
#ifndef RVMODEL_SET_MSW_INT
//.warning "RVMODEL_SET_MSW_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_SET_MSW_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_MSW_INT
//.warning "RVMODEL_CLR_MSW_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_MSW_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_MTIMER_INT
//.warning "RVMODEL_CLR_MTIMER_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_MTIMER_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_MEXT_INT
//.warning "RVMODEL_CLR_MEXT_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_MEXT_INT RVTEST_DFLT_INT_HNDLR
#endif
// NOTE(review): only SW interrupts get SET_* defaults here; no guards for
// RVMODEL_SET_MTIMER/MEXT/STIMER/SEXT/VTIMER/VEXT — presumably the timer and
// external SET macros are model-mandatory or guarded elsewhere; confirm.
//Smode interrupts
#ifndef RVMODEL_SET_SSW_INT
//.warning "RVMODEL_SET_SSW_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_SET_SSW_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_SSW_INT
//.warning "RVMODEL_CLR_SSW_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_SSW_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_STIMER_INT
//.warning "RVMODEL_CLR_STIMER_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_STIMER_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_SEXT_INT
//.warning "RVMODEL_CLR_SEXT_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_SEXT_INT RVTEST_DFLT_INT_HNDLR
#endif
//Vmode interrupts
#ifndef RVMODEL_SET_VSW_INT
//.warning "RVMODEL_SET_VSW_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_SET_VSW_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_VSW_INT
//.warning "RVMODEL_CLR_VSW_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_VSW_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_VTIMER_INT
//.warning "RVMODEL_CLR_VTIMER_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_VTIMER_INT RVTEST_DFLT_INT_HNDLR
#endif
#ifndef RVMODEL_CLR_VEXT_INT
//.warning "RVMODEL_CLR_VEXT_INT not defined. Executing this will end test. Define an empty macro to suppress this warning"
#define RVMODEL_CLR_VEXT_INT RVTEST_DFLT_INT_HNDLR
#endif
//==============================================================================
// This section defines macros used by these required macros:
// RVTEST_TRAP_PROLOG, RVTEST_TRAP_HANDLER, RVTEST_TRAP_EPILOG
// These are macros instead of inline because they need to be replicated per mode
// These are passed the privmode as an argument to properly rename labels
// The helper INSTANTIATE_MODE_MACRO actually handles the replication
//==============================================================================
.macro RVTEST_TRAP_PROLOG __MODE__
.option push
.option norvc
/******************************************************************************/
/**** this is a mode-configured version of the prolog, which either saves and */
/**** replaces xtvec, or saves and replaces the code located at xtvec if it */
/**** it xtvec isn't arbitrarily writable. If not writable, restore & exit */
/******************************************************************************/
/******************************************************************************/
/**** Prolog, to be run before any tests ****/
/**** #include 1 copy of this per mode in rvmodel_boot code? ****/
/**** ------------------------------------------------------------------- ****/
/**** if xTVEC isn't completely RW, then we need to change the code at its ****/
/**** target. The entire trap trampoline and mtrap handler replaces the ****/
/**** area pointed to by mtvec, after saving its original contents first. ****/
/**** If it isn't possible to fully write that area, restore and fail. ****/
/******************************************************************************/
// RVTEST_TRAP_PROLOG trap_handler_prolog; enter with T1..T6 available; define specific handler
// sp will immediately point to the current mode's save area and must not be touched
//NOTE: this is run in M-mode, so can't use aliased S,V CSR names
.global \__MODE__\()trampoline
//.global mtrap_sigptr
XCSR_VRENAME \__MODE__ //retarget XCSR names to this modes CSRs, separate V/S copies
LA( T1, \__MODE__\()tramptbl_sv) // get ptr to save area (will be stored in xSCRATCH)
//----------------------------------------------------------------------
init_\__MODE__\()scratch:
csrrw T3, CSR_XSCRATCH, T1 // swap xscratch with save area ptr (will be used by handler)
SREG T3, xscr_save_off(T1) // save old mscratch in xscratch_save
//----------------------------------------------------------------------
init_\__MODE__\()edeleg:
li T2, 0 // save and clear edeleg so we can exit to Mmode
.ifc \__MODE__ , V
csrrw T2, CSR_VEDELEG, T2 // special case: VS EDELEG available from Vmode
.else
.ifc \__MODE__ , M
#ifdef rvtest_strap_routine
csrrw T2, CSR_XEDELEG, T2 // this handles M mode save, but only if Smode exists
#endif
.else
//FIXME: if N-extension or anything like it is implemented, uncomment the following
// csrrw T2, CSR_XEDELEG, T2 // this handles S mode
.endif