3 files changed, +93 -108 lines changed
@@ -40,6 +40,6 @@
 lib-y += csum-partial_64.o csum-copy_64.o csum-wrappers_64.o
 lib-y += clear_page_64.o copy_page_64.o
 lib-y += memmove_64.o memset_64.o
-lib-y += copy_user_64.o copy_user_nocache_64.o
+lib-y += copy_user_64.o
 lib-y += cmpxchg16b_emu.o
 endif
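
(Note: copy_user_nocache_64.o disappears from the lib-y list because its only routine, __copy_user_nocache, is folded into copy_user_64.S by the hunk below; the deleted file at the end of this diff is presumably its source, copy_user_nocache_64.S.)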
@@ -242,3 +242,95 @@ ENTRY(copy_user_enhanced_fast_string)
 _ASM_EXTABLE(1b,12b)
 CFI_ENDPROC
 ENDPROC(copy_user_enhanced_fast_string)
+
+/*
+ * copy_user_nocache - Uncached memory copy with exception handling
+ * This will force destination/source out of cache for more performance.
+ */
+ENTRY(__copy_user_nocache)
+	CFI_STARTPROC
+	ASM_STAC
+	cmpl $8,%edx
+	jb 20f		/* less than 8 bytes, go to byte copy loop */
+	ALIGN_DESTINATION
+	movl %edx,%ecx
+	andl $63,%edx
+	shrl $6,%ecx
+	jz 17f
+1:	movq (%rsi),%r8
+2:	movq 1*8(%rsi),%r9
+3:	movq 2*8(%rsi),%r10
+4:	movq 3*8(%rsi),%r11
+5:	movnti %r8,(%rdi)
+6:	movnti %r9,1*8(%rdi)
+7:	movnti %r10,2*8(%rdi)
+8:	movnti %r11,3*8(%rdi)
+9:	movq 4*8(%rsi),%r8
+10:	movq 5*8(%rsi),%r9
+11:	movq 6*8(%rsi),%r10
+12:	movq 7*8(%rsi),%r11
+13:	movnti %r8,4*8(%rdi)
+14:	movnti %r9,5*8(%rdi)
+15:	movnti %r10,6*8(%rdi)
+16:	movnti %r11,7*8(%rdi)
+	leaq 64(%rsi),%rsi
+	leaq 64(%rdi),%rdi
+	decl %ecx
+	jnz 1b
+17:	movl %edx,%ecx		/* copy remaining 8-byte words */
+	andl $7,%edx
+	shrl $3,%ecx
+	jz 20f
+18:	movq (%rsi),%r8
+19:	movnti %r8,(%rdi)
+	leaq 8(%rsi),%rsi
+	leaq 8(%rdi),%rdi
+	decl %ecx
+	jnz 18b
+20:	andl %edx,%edx		/* copy trailing bytes */
+	jz 23f
+	movl %edx,%ecx
+21:	movb (%rsi),%al
+22:	movb %al,(%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz 21b
+23:	xorl %eax,%eax
+	ASM_CLAC
+	sfence			/* order the non-temporal stores */
+	ret
+
+	.section .fixup,"ax"
+30:	shll $6,%ecx		/* fault in 64-byte loop: left = 64*blocks + tail */
+	addl %ecx,%edx
+	jmp 60f
+40:	lea (%rdx,%rcx,8),%rdx	/* fault in 8-byte loop: left = 8*words + tail */
+	jmp 60f
+50:	movl %ecx,%edx		/* fault in byte loop: left = remaining bytes */
+60:	sfence
+	jmp copy_user_handle_tail
+	.previous
+
+	_ASM_EXTABLE(1b,30b)
+	_ASM_EXTABLE(2b,30b)
+	_ASM_EXTABLE(3b,30b)
+	_ASM_EXTABLE(4b,30b)
+	_ASM_EXTABLE(5b,30b)
+	_ASM_EXTABLE(6b,30b)
+	_ASM_EXTABLE(7b,30b)
+	_ASM_EXTABLE(8b,30b)
+	_ASM_EXTABLE(9b,30b)
+	_ASM_EXTABLE(10b,30b)
+	_ASM_EXTABLE(11b,30b)
+	_ASM_EXTABLE(12b,30b)
+	_ASM_EXTABLE(13b,30b)
+	_ASM_EXTABLE(14b,30b)
+	_ASM_EXTABLE(15b,30b)
+	_ASM_EXTABLE(16b,30b)
+	_ASM_EXTABLE(18b,40b)
+	_ASM_EXTABLE(19b,40b)
+	_ASM_EXTABLE(21b,50b)
+	_ASM_EXTABLE(22b,50b)
+	CFI_ENDPROC
+ENDPROC(__copy_user_nocache)
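
How the fixup paths recover the uncopied byte count: copy_user_handle_tail expects the number of bytes still to copy in %rdx. A fault in the 64-byte loop (labels 1-16) lands at 30:, where %ecx still holds the number of 64-byte blocks not yet completed (including the faulting one) and %edx the sub-64 remainder, so shll $6 / addl computes left = 64*%ecx + %edx. A fault in the 8-byte loop (labels 18/19) lands at 40:, where lea computes left = %edx + 8*%ecx. A fault in the byte loop (labels 21/22) lands at 50:, where left is simply %ecx. Worked example: for a 200-byte copy (three 64-byte blocks plus an 8-byte word), a fault in the second block leaves %ecx = 2 and %edx = 8, so the fixup reports 2*64 + 8 = 136 bytes uncopied, conservatively treating the in-progress block as not copied at all. Each path issues sfence before jumping to the tail handler, so the non-temporal stores already issued are ordered first.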
This file was deleted (presumably arch/x86/lib/copy_user_nocache_64.S, matching the object removed from the Makefile above).
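
For readers who want to experiment with the non-temporal copy pattern outside the kernel, here is a rough user-space C analogue of the routine's structure, using the SSE2 _mm_stream_si64 intrinsic (the same movnti instruction). This is a sketch under stated assumptions, not the kernel implementation: the hypothetical copy_nocache helper keeps the 64-byte/8-byte/byte tiering and the trailing sfence, but omits the exception handling, the ALIGN_DESTINATION macro (replaced by a byte loop), and STAC/CLAC, which have no user-space counterpart here.

#include <immintrin.h>	/* _mm_stream_si64 (SSE2, x86-64), _mm_sfence */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical user-space analogue of __copy_user_nocache's structure. */
static void copy_nocache(void *dst, const void *src, size_t len)
{
	char *d = dst;
	const char *s = src;

	/* Stand-in for ALIGN_DESTINATION: byte-copy until dst is 8-aligned. */
	while (len && ((uintptr_t)d & 7)) {
		*d++ = *s++;
		len--;
	}

	/* 64 bytes per iteration, mirroring labels 1..16 above. */
	for (; len >= 64; len -= 64, s += 64, d += 64) {
		for (int i = 0; i < 8; i++) {
			long long q;
			memcpy(&q, s + 8 * i, sizeof(q));	/* ordinary load */
			_mm_stream_si64((long long *)(d + 8 * i), q); /* movnti */
		}
	}

	/* Remaining 8-byte words, mirroring labels 18..19. */
	for (; len >= 8; len -= 8, s += 8, d += 8) {
		long long q;
		memcpy(&q, s, sizeof(q));
		_mm_stream_si64((long long *)d, q);
	}

	/* Byte tail, mirroring labels 21..22. */
	while (len--)
		*d++ = *s++;

	_mm_sfence();	/* make the streaming stores globally visible */
}

The closing _mm_sfence matters because non-temporal stores are weakly ordered: without it, code that hands the buffer to another thread could publish the pointer before the streamed data is globally visible.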