@@ -65,117 +65,13 @@ copy_to_user_mcsafe(void *to, const void *from, unsigned len)
 static __always_inline __must_check unsigned long
 raw_copy_from_user(void *dst, const void __user *src, unsigned long size)
 {
-	int ret = 0;
-
-	if (!__builtin_constant_p(size))
-		return copy_user_generic(dst, (__force void *)src, size);
-	switch (size) {
-	case 1:
-		__uaccess_begin_nospec();
-		__get_user_asm_nozero(*(u8 *)dst, (u8 __user *)src,
-			      ret, "b", "b", "=q", 1);
-		__uaccess_end();
-		return ret;
-	case 2:
-		__uaccess_begin_nospec();
-		__get_user_asm_nozero(*(u16 *)dst, (u16 __user *)src,
-			      ret, "w", "w", "=r", 2);
-		__uaccess_end();
-		return ret;
-	case 4:
-		__uaccess_begin_nospec();
-		__get_user_asm_nozero(*(u32 *)dst, (u32 __user *)src,
-			      ret, "l", "k", "=r", 4);
-		__uaccess_end();
-		return ret;
-	case 8:
-		__uaccess_begin_nospec();
-		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
-			      ret, "q", "", "=r", 8);
-		__uaccess_end();
-		return ret;
-	case 10:
-		__uaccess_begin_nospec();
-		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
-			       ret, "q", "", "=r", 10);
-		if (likely(!ret))
-			__get_user_asm_nozero(*(u16 *)(8 + (char *)dst),
-				       (u16 __user *)(8 + (char __user *)src),
-				       ret, "w", "w", "=r", 2);
-		__uaccess_end();
-		return ret;
-	case 16:
-		__uaccess_begin_nospec();
-		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
-			       ret, "q", "", "=r", 16);
-		if (likely(!ret))
-			__get_user_asm_nozero(*(u64 *)(8 + (char *)dst),
-				       (u64 __user *)(8 + (char __user *)src),
-				       ret, "q", "", "=r", 8);
-		__uaccess_end();
-		return ret;
-	default:
-		return copy_user_generic(dst, (__force void *)src, size);
-	}
+	return copy_user_generic(dst, (__force void *)src, size);
 }
 
 static __always_inline __must_check unsigned long
 raw_copy_to_user(void __user *dst, const void *src, unsigned long size)
 {
-	int ret = 0;
-
-	if (!__builtin_constant_p(size))
-		return copy_user_generic((__force void *)dst, src, size);
-	switch (size) {
-	case 1:
-		__uaccess_begin();
-		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
-			      ret, "b", "b", "iq", 1);
-		__uaccess_end();
-		return ret;
-	case 2:
-		__uaccess_begin();
-		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
-			      ret, "w", "w", "ir", 2);
-		__uaccess_end();
-		return ret;
-	case 4:
-		__uaccess_begin();
-		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
-			      ret, "l", "k", "ir", 4);
-		__uaccess_end();
-		return ret;
-	case 8:
-		__uaccess_begin();
-		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
-			      ret, "q", "", "er", 8);
-		__uaccess_end();
-		return ret;
-	case 10:
-		__uaccess_begin();
-		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
-			       ret, "q", "", "er", 10);
-		if (likely(!ret)) {
-			asm("":::"memory");
-			__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
-				       ret, "w", "w", "ir", 2);
-		}
-		__uaccess_end();
-		return ret;
-	case 16:
-		__uaccess_begin();
-		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
-			       ret, "q", "", "er", 16);
-		if (likely(!ret)) {
-			asm("":::"memory");
-			__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
-				       ret, "q", "", "er", 8);
-		}
-		__uaccess_end();
-		return ret;
-	default:
-		return copy_user_generic((__force void *)dst, src, size);
-	}
+	return copy_user_generic((__force void *)dst, src, size);
 }
 
 static __always_inline __must_check
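With the constant-size cases gone, every fixed-size user copy on this path funnels through copy_user_generic(); the compile-time switch on the size argument no longer exists. A minimal, hypothetical caller to make that concrete (struct demo_req and demo_ioctl_copy are illustrative names, not taken from this patch or the kernel tree):

#include <linux/types.h>
#include <linux/uaccess.h>

struct demo_req {
	u64 addr;	/* sizeof(struct demo_req) == 16 */
	u64 len;
};

static long demo_ioctl_copy(void __user *uptr)
{
	struct demo_req req;

	/*
	 * copy_from_user() bottoms out in raw_copy_from_user(); after
	 * this patch the constant sizeof(req) == 16 no longer selects
	 * the removed "case 16" inline asm and instead goes through
	 * copy_user_generic() like any other size.
	 */
	if (copy_from_user(&req, uptr, sizeof(req)))
		return -EFAULT;

	return 0;
}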