@@ -71,6 +71,200 @@ static bool at_s1e1p_fast(struct kvm_vcpu *vcpu, u32 op, u64 vaddr)
 	return fail;
 }
 
+#define MEMATTR(ic, oc)		(MEMATTR_##oc << 4 | MEMATTR_##ic)
+#define MEMATTR_NC		0b0100
+#define MEMATTR_Wt		0b1000
+#define MEMATTR_Wb		0b1100
+#define MEMATTR_WbRaWa		0b1111
+
+#define MEMATTR_IS_DEVICE(m)	(((m) & GENMASK(7, 4)) == 0)
+
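+/*
+ * The resulting attribute byte follows the MAIR_EL1 layout: Outer
+ * attributes in bits [7:4], Inner attributes in bits [3:0]. For
+ * example, MEMATTR(NC, Wb) expands to 0b1100_0100: Outer Write-Back,
+ * Inner Non-cacheable. Anything with a zero upper nibble is Device.
+ */
+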
+static u8 s2_memattr_to_attr(u8 memattr)
+{
+	memattr &= 0b1111;
+
+	switch (memattr) {
+	case 0b0000:
+	case 0b0001:
+	case 0b0010:
+	case 0b0011:
+		return memattr << 2;
+	case 0b0100:
+		return MEMATTR(Wb, Wb);
+	case 0b0101:
+		return MEMATTR(NC, NC);
+	case 0b0110:
+		return MEMATTR(Wt, NC);
+	case 0b0111:
+		return MEMATTR(Wb, NC);
+	case 0b1000:
+		/* Reserved, assume NC */
+		return MEMATTR(NC, NC);
+	case 0b1001:
+		return MEMATTR(NC, Wt);
+	case 0b1010:
+		return MEMATTR(Wt, Wt);
+	case 0b1011:
+		return MEMATTR(Wb, Wt);
+	case 0b1100:
+		/* Reserved, assume NC */
+		return MEMATTR(NC, NC);
+	case 0b1101:
+		return MEMATTR(NC, Wb);
+	case 0b1110:
+		return MEMATTR(Wt, Wb);
+	case 0b1111:
+		return MEMATTR(Wb, Wb);
+	default:
+		unreachable();
+	}
+}
+
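+/*
+ * Worked example for the table above: S2 MemAttr 0b0110 maps to
+ * MEMATTR(Wt, NC) == 0b0100_1000, i.e. Outer Non-cacheable, Inner
+ * Write-Through in the MAIR-style encoding.
+ */
+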
+static u8 combine_s1_s2_attr(u8 s1, u8 s2)
+{
+	bool transient;
+	u8 final = 0;
+
+	/* Upgrade transient s1 to non-transient to simplify things */
+	switch (s1) {
+	case 0b0001 ... 0b0011:	/* Normal, Write-Through Transient */
+		transient = true;
+		s1 = MEMATTR_Wt | (s1 & GENMASK(1, 0));
+		break;
+	case 0b0101 ... 0b0111:	/* Normal, Write-Back Transient */
+		transient = true;
+		s1 = MEMATTR_Wb | (s1 & GENMASK(1, 0));
+		break;
+	default:
+		transient = false;
+	}
+
+	/* S2CombineS1AttrHints() */
+	if ((s1 & GENMASK(3, 2)) == MEMATTR_NC ||
+	    (s2 & GENMASK(3, 2)) == MEMATTR_NC)
+		final = MEMATTR_NC;
+	else if ((s1 & GENMASK(3, 2)) == MEMATTR_Wt ||
+		 (s2 & GENMASK(3, 2)) == MEMATTR_Wt)
+		final = MEMATTR_Wt;
+	else
+		final = MEMATTR_Wb;
+
+	if (final != MEMATTR_NC) {
+		/* Inherit RaWa hints from S1 */
+		if (transient) {
+			switch (s1 & GENMASK(3, 2)) {
+			case MEMATTR_Wt:
+				final = 0;
+				break;
+			case MEMATTR_Wb:
+				final = MEMATTR_NC;
+				break;
+			}
+		}
+
+		final |= s1 & GENMASK(1, 0);
+	}
+
+	return final;
+}
+
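+/*
+ * Example: combine_s1_s2_attr(0b1111, 0b1000) combines S1 Write-Back
+ * RaWa with S2 Write-Through (no hints). Write-Through wins the
+ * cacheability merge, the S1 allocation hints are inherited, and the
+ * result is 0b1011.
+ */
+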
+#define ATTR_NSH	0b00
+#define ATTR_RSV	0b01
+#define ATTR_OSH	0b10
+#define ATTR_ISH	0b11
+
+static u8 compute_sh(u8 attr, u64 desc)
+{
+	u8 sh;
+
+	/* Any form of device, as well as NC, has SH[1:0]=0b10 */
+	if (MEMATTR_IS_DEVICE(attr) || attr == MEMATTR(NC, NC))
+		return ATTR_OSH;
+
+	sh = FIELD_GET(PTE_SHARED, desc);
+	if (sh == ATTR_RSV)	/* Reserved, mapped to NSH */
+		sh = ATTR_NSH;
+
+	return sh;
+}
+
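+/*
+ * In other words: Device and fully Non-cacheable memory always report
+ * Outer Shareable, while anything else takes the shareability straight
+ * from the final S2 descriptor, e.g. an SH field of 0b11 yields
+ * ATTR_ISH (Inner Shareable).
+ */
+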
+static u64 compute_par_s12(struct kvm_vcpu *vcpu, u64 s1_par,
+			   struct kvm_s2_trans *tr)
+{
+	u8 s1_parattr, s2_memattr, final_attr;
+	u64 par;
+
+	/* If S2 has failed to translate, report the damage */
+	if (tr->esr) {
+		par = SYS_PAR_EL1_RES1;
+		par |= SYS_PAR_EL1_F;
+		par |= SYS_PAR_EL1_S;
+		par |= FIELD_PREP(SYS_PAR_EL1_FST, tr->esr);
+		return par;
+	}
+
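+	/*
+	 * Past this point the S2 walk has succeeded and PAR_EL1.F stays
+	 * clear; a level-3 S2 permission fault, for instance, would have
+	 * been reported above with FST = ESR_ELx_FSC_PERM | 3.
+	 */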
+	s1_parattr = FIELD_GET(SYS_PAR_EL1_ATTR, s1_par);
+	s2_memattr = FIELD_GET(GENMASK(5, 2), tr->desc);
+
+	if (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_FWB) {
+		if (!kvm_has_feat(vcpu->kvm, ID_AA64PFR2_EL1, MTEPERM, IMP))
+			s2_memattr &= ~BIT(3);
+
+		/* Combination of R_VRJSW and R_RHWZM */
+		switch (s2_memattr) {
+		case 0b0101:
+			if (MEMATTR_IS_DEVICE(s1_parattr))
+				final_attr = s1_parattr;
+			else
+				final_attr = MEMATTR(NC, NC);
+			break;
+		case 0b0110:
+		case 0b1110:
+			final_attr = MEMATTR(WbRaWa, WbRaWa);
+			break;
+		case 0b0111:
+		case 0b1111:
+			/* Preserve S1 attribute */
+			final_attr = s1_parattr;
+			break;
+		case 0b0100:
+		case 0b1100:
+		case 0b1101:
+			/* Reserved, do something non-silly */
+			final_attr = s1_parattr;
+			break;
+		default:
+			/* MemAttr[2]=0, Device from S2 */
+			final_attr = (s2_memattr & GENMASK(1, 0)) << 2;
+		}
+	} else {
+		/* Combination of R_HMNDG, R_TNHFM and R_GQFSF */
+		u8 s2_parattr = s2_memattr_to_attr(s2_memattr);
+
+		if (MEMATTR_IS_DEVICE(s1_parattr) ||
+		    MEMATTR_IS_DEVICE(s2_parattr)) {
+			final_attr = min(s1_parattr, s2_parattr);
+		} else {
+			/* At this stage, this is memory vs memory */
+			final_attr = combine_s1_s2_attr(s1_parattr & 0xf,
+							s2_parattr & 0xf);
+			final_attr |= combine_s1_s2_attr(s1_parattr >> 4,
+							 s2_parattr >> 4) << 4;
+		}
+	}
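+	/*
+	 * Note that min() picks the more restrictive device type, since
+	 * Device attributes sort that way in the MAIR-style encoding:
+	 * Device-nGnRnE (0x00) beats Device-GRE (0x0C), and any Device
+	 * value beats Normal memory.
+	 */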
+
+	if ((__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_CD) &&
+	    !MEMATTR_IS_DEVICE(final_attr))
+		final_attr = MEMATTR(NC, NC);
+
+	par = FIELD_PREP(SYS_PAR_EL1_ATTR, final_attr);
+	par |= tr->output & GENMASK(47, 12);
+	par |= FIELD_PREP(SYS_PAR_EL1_SH,
+			  compute_sh(final_attr, tr->desc));
+
+	return par;
+}
+
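+/*
+ * A successful PAR_EL1 value thus carries the merged memory attributes
+ * in ATTR, the S2 output address in PA[47:12], and the shareability
+ * computed above in SH, with the F bit clear.
+ */
+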
 /*
  * Return the PAR_EL1 value as the result of a valid translation.
  *
@@ -215,3 +409,62 @@ void __kvm_at_s1e2(struct kvm_vcpu *vcpu, u32 op, u64 vaddr)
 
 	vcpu_write_sys_reg(vcpu, par, PAR_EL1);
 }
+
+void __kvm_at_s12(struct kvm_vcpu *vcpu, u32 op, u64 vaddr)
+{
+	struct kvm_s2_trans out = {};
+	u64 ipa, par;
+	bool write;
+	int ret;
+
+	/* Do the stage-1 translation */
+	switch (op) {
+	case OP_AT_S12E1R:
+		op = OP_AT_S1E1R;
+		write = false;
+		break;
+	case OP_AT_S12E1W:
+		op = OP_AT_S1E1W;
+		write = true;
+		break;
+	case OP_AT_S12E0R:
+		op = OP_AT_S1E0R;
+		write = false;
+		break;
+	case OP_AT_S12E0W:
+		op = OP_AT_S1E0W;
+		write = true;
+		break;
+	default:
+		WARN_ON_ONCE(1);
+		return;
+	}
+
+	__kvm_at_s1e01(vcpu, op, vaddr);
+	par = vcpu_read_sys_reg(vcpu, PAR_EL1);
+	if (par & SYS_PAR_EL1_F)
+		return;
+
+	/*
+	 * If we only have a single stage of translation (E2H=0 or
+	 * TGE=1), exit early. Same thing if {VM,DC}=={0,0}.
+	 */
+	if (!vcpu_el2_e2h_is_set(vcpu) || vcpu_el2_tge_is_set(vcpu) ||
+	    !(vcpu_read_sys_reg(vcpu, HCR_EL2) & (HCR_VM | HCR_DC)))
+		return;
+
+	/* Do the stage-2 translation */
+	ipa = (par & GENMASK_ULL(47, 12)) | (vaddr & GENMASK_ULL(11, 0));
+	out.esr = 0;
+	ret = kvm_walk_nested_s2(vcpu, ipa, &out);
+	if (ret < 0)
+		return;
+
+	/* Check the access permission */
+	if (!out.esr &&
+	    ((!write && !out.readable) || (write && !out.writable)))
+		out.esr = ESR_ELx_FSC_PERM | (out.level & 0x3);
+
+	par = compute_par_s12(vcpu, par, &out);
+	vcpu_write_sys_reg(vcpu, par, PAR_EL1);
+}
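+
+/*
+ * Usage sketch: a guest hypervisor's "AT S12E1R, Xt" trapped by KVM
+ * lands here with op == OP_AT_S12E1R; after the call, the combined
+ * S1+S2 result (or the fault encoding with F set) is read back from
+ * the guest's PAR_EL1.
+ */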