ppc32: use L1_CACHE_SHIFT/L1_CACHE_BYTES

instead of L1_CACHE_LINE_SIZE and LG_L1_CACHE_LINE_SIZE

Signed-off-by: Stephen Rothwell <sfr@canb.auug.org.au>
This commit is contained in:
Stephen Rothwell 2005-10-17 11:50:32 +10:00
parent cf76485562
commit 7dffb72028
14 changed files with 100 additions and 103 deletions

View file

@@ -837,7 +837,7 @@ relocate_kernel:
 copy_and_flush:
	addi	r5,r5,-4
	addi	r6,r6,-4
-4:	li	r0,L1_CACHE_LINE_SIZE/4
+4:	li	r0,L1_CACHE_BYTES/4
	mtctr	r0
 3:	addi	r6,r6,4		/* copy a cache line */
	lwzx	r0,r6,r4

View file

@@ -496,21 +496,21 @@ _GLOBAL(flush_icache_range)
 BEGIN_FTR_SECTION
	blr				/* for 601, do nothing */
 END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
	mr	r6,r3
 1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	mtctr	r4
 2:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	2b
	sync				/* additional sync needed on g4 */
	isync
@@ -523,16 +523,16 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
  * clean_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(clean_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
 1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr
@@ -544,16 +544,16 @@ _GLOBAL(clean_dcache_range)
  * flush_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(flush_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
 1:	dcbf	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr
@@ -566,16 +566,16 @@ _GLOBAL(flush_dcache_range)
  * invalidate_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(invalidate_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
 1:	dcbi	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbi's to get to ram */
	blr
@@ -596,7 +596,7 @@ _GLOBAL(flush_dcache_all)
	mtctr	r4
	lis	r5, KERNELBASE@h
 1:	lwz	r3, 0(r5)		/* Load one word from every line */
-	addi	r5, r5, L1_CACHE_LINE_SIZE
+	addi	r5, r5, L1_CACHE_BYTES
	bdnz	1b
	blr
 #endif /* CONFIG_NOT_COHERENT_CACHE */
@@ -614,16 +614,16 @@ BEGIN_FTR_SECTION
	blr					/* for 601, do nothing */
 END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
 0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	0b
	sync
	mtctr	r4
 1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	1b
	sync
	isync
@@ -646,16 +646,16 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
	mtmsr	r0
	isync
	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
 0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	0b
	sync
	mtctr	r4
 1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	1b
	sync
	mtmsr	r10				/* restore DR */
@@ -670,7 +670,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
  * void clear_pages(void *page, int order) ;
  */
 _GLOBAL(clear_pages)
-	li	r0,4096/L1_CACHE_LINE_SIZE
+	li	r0,4096/L1_CACHE_BYTES
	slw	r0,r0,r4
	mtctr	r0
 #ifdef CONFIG_8xx
@@ -682,7 +682,7 @@ _GLOBAL(clear_pages)
 #else
 1:	dcbz	0,r3
 #endif
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	blr
@@ -708,7 +708,7 @@ _GLOBAL(copy_page)
 #ifdef CONFIG_8xx
	/* don't use prefetch on 8xx */
-	li	r0,4096/L1_CACHE_LINE_SIZE
+	li	r0,4096/L1_CACHE_BYTES
	mtctr	r0
 1:	COPY_16_BYTES
	bdnz	1b
@@ -722,13 +722,13 @@ _GLOBAL(copy_page)
	li	r11,4
	mtctr	r0
 11:	dcbt	r11,r4
-	addi	r11,r11,L1_CACHE_LINE_SIZE
+	addi	r11,r11,L1_CACHE_BYTES
	bdnz	11b
 #else /* MAX_COPY_PREFETCH == 1 */
	dcbt	r5,r4
-	li	r11,L1_CACHE_LINE_SIZE+4
+	li	r11,L1_CACHE_BYTES+4
 #endif /* MAX_COPY_PREFETCH */
-	li	r0,4096/L1_CACHE_LINE_SIZE - MAX_COPY_PREFETCH
+	li	r0,4096/L1_CACHE_BYTES - MAX_COPY_PREFETCH
	crclr	4*cr0+eq
 2:
	mtctr	r0
@@ -736,12 +736,12 @@ _GLOBAL(copy_page)
	dcbt	r11,r4
	dcbz	r5,r3
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES

View file

@@ -66,9 +66,9 @@
	.stabs	"copy32.S",N_SO,0,0,0f
 0:
-CACHELINE_BYTES = L1_CACHE_LINE_SIZE
-LG_CACHELINE_BYTES = LG_L1_CACHE_LINE_SIZE
-CACHELINE_MASK = (L1_CACHE_LINE_SIZE-1)
+CACHELINE_BYTES = L1_CACHE_BYTES
+LG_CACHELINE_BYTES = L1_CACHE_SHIFT
+CACHELINE_MASK = (L1_CACHE_BYTES-1)
 /*
  * Use dcbz on the complete cache lines in the destination
@@ -205,12 +205,12 @@ _GLOBAL(cacheable_memcpy)
	dcbz	r11,r6
 #endif
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES
@@ -399,12 +399,12 @@ _GLOBAL(__copy_tofrom_user)
	.text
 /* the main body of the cacheline loop */
	COPY_16_BYTES_WITHEX(0)
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_WITHEX(1)
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_WITHEX(2)
	COPY_16_BYTES_WITHEX(3)
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_WITHEX(4)
	COPY_16_BYTES_WITHEX(5)
	COPY_16_BYTES_WITHEX(6)
@@ -458,12 +458,12 @@ _GLOBAL(__copy_tofrom_user)
  * 104f (if in read part) or 105f (if in write part), after updating r5
  */
	COPY_16_BYTES_EXCODE(0)
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_EXCODE(1)
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_EXCODE(2)
	COPY_16_BYTES_EXCODE(3)
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_EXCODE(4)
	COPY_16_BYTES_EXCODE(5)
	COPY_16_BYTES_EXCODE(6)

View file

@@ -387,10 +387,10 @@ turn_on_mmu:
 #endif /* defined(CONFIG_PM) || defined(CONFIG_CPU_FREQ) */
	.section .data
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
 sleep_storage:
	.long 0
-	.balign	L1_CACHE_LINE_SIZE, 0
+	.balign	L1_CACHE_BYTES, 0
 #endif /* CONFIG_6xx */
	.section .text

View file

@@ -290,10 +290,10 @@ _GLOBAL(__init_fpu_registers)
 #define CS_SIZE	32
	.data
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
 cpu_state_storage:
	.space	CS_SIZE
-	.balign	L1_CACHE_LINE_SIZE,0
+	.balign	L1_CACHE_BYTES,0
	.text
 /* Called in normal context to backup CPU 0 state. This

View file

@@ -86,10 +86,10 @@ _GLOBAL(__setup_cpu_ppc970)
 #define CS_SIZE	32
	.data
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
 cpu_state_storage:
	.space	CS_SIZE
-	.balign	L1_CACHE_LINE_SIZE,0
+	.balign	L1_CACHE_BYTES,0
	.text
 /* Called in normal context to backup CPU 0 state. This

View file

@@ -916,7 +916,7 @@ relocate_kernel:
 copy_and_flush:
	addi	r5,r5,-4
	addi	r6,r6,-4
-4:	li	r0,L1_CACHE_LINE_SIZE/4
+4:	li	r0,L1_CACHE_BYTES/4
	mtctr	r0
 3:	addi	r6,r6,4		/* copy a cache line */
	lwzx	r0,r6,r4

View file

@@ -203,7 +203,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
	 * L1 icache
	 */
	b	20f
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
 22:
	sync
	mtspr	SPRN_L2CR,r3

View file

@@ -498,21 +498,21 @@ _GLOBAL(flush_icache_range)
 BEGIN_FTR_SECTION
	blr				/* for 601, do nothing */
 END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
	mr	r6,r3
 1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	mtctr	r4
 2:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	2b
	sync				/* additional sync needed on g4 */
	isync
@@ -525,16 +525,16 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
  * clean_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(clean_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
 1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr
@@ -546,16 +546,16 @@ _GLOBAL(clean_dcache_range)
  * flush_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(flush_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
 1:	dcbf	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr
@@ -568,16 +568,16 @@ _GLOBAL(flush_dcache_range)
  * invalidate_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(invalidate_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
 1:	dcbi	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbi's to get to ram */
	blr
@@ -598,7 +598,7 @@ _GLOBAL(flush_dcache_all)
	mtctr	r4
	lis	r5, KERNELBASE@h
 1:	lwz	r3, 0(r5)		/* Load one word from every line */
-	addi	r5, r5, L1_CACHE_LINE_SIZE
+	addi	r5, r5, L1_CACHE_BYTES
	bdnz	1b
	blr
 #endif /* CONFIG_NOT_COHERENT_CACHE */
@@ -616,16 +616,16 @@ BEGIN_FTR_SECTION
	blr					/* for 601, do nothing */
 END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
 0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	0b
	sync
	mtctr	r4
 1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	1b
	sync
	isync
@@ -648,16 +648,16 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
	mtmsr	r0
	isync
	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
 0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	0b
	sync
	mtctr	r4
 1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	1b
	sync
	mtmsr	r10				/* restore DR */
@@ -672,7 +672,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
  * void clear_pages(void *page, int order) ;
  */
 _GLOBAL(clear_pages)
-	li	r0,4096/L1_CACHE_LINE_SIZE
+	li	r0,4096/L1_CACHE_BYTES
	slw	r0,r0,r4
	mtctr	r0
 #ifdef CONFIG_8xx
@@ -684,7 +684,7 @@ _GLOBAL(clear_pages)
 #else
 1:	dcbz	0,r3
 #endif
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	blr
@@ -710,7 +710,7 @@ _GLOBAL(copy_page)
 #ifdef CONFIG_8xx
	/* don't use prefetch on 8xx */
-	li	r0,4096/L1_CACHE_LINE_SIZE
+	li	r0,4096/L1_CACHE_BYTES
	mtctr	r0
 1:	COPY_16_BYTES
	bdnz	1b
@@ -724,13 +724,13 @@ _GLOBAL(copy_page)
	li	r11,4
	mtctr	r0
 11:	dcbt	r11,r4
-	addi	r11,r11,L1_CACHE_LINE_SIZE
+	addi	r11,r11,L1_CACHE_BYTES
	bdnz	11b
 #else /* MAX_COPY_PREFETCH == 1 */
	dcbt	r5,r4
-	li	r11,L1_CACHE_LINE_SIZE+4
+	li	r11,L1_CACHE_BYTES+4
 #endif /* MAX_COPY_PREFETCH */
-	li	r0,4096/L1_CACHE_LINE_SIZE - MAX_COPY_PREFETCH
+	li	r0,4096/L1_CACHE_BYTES - MAX_COPY_PREFETCH
	crclr	4*cr0+eq
 2:
	mtctr	r0
@@ -738,12 +738,12 @@ _GLOBAL(copy_page)
	dcbt	r11,r4
	dcbz	r5,r3
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES

View file

@@ -65,9 +65,9 @@
	.stabs	"arch/ppc/lib/",N_SO,0,0,0f
	.stabs	"string.S",N_SO,0,0,0f
-CACHELINE_BYTES = L1_CACHE_LINE_SIZE
-LG_CACHELINE_BYTES = LG_L1_CACHE_LINE_SIZE
-CACHELINE_MASK = (L1_CACHE_LINE_SIZE-1)
+CACHELINE_BYTES = L1_CACHE_BYTES
+LG_CACHELINE_BYTES = L1_CACHE_SHIFT
+CACHELINE_MASK = (L1_CACHE_BYTES-1)
 _GLOBAL(strcpy)
	addi	r5,r3,-1
@@ -265,12 +265,12 @@ _GLOBAL(cacheable_memcpy)
	dcbz	r11,r6
 #endif
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES
@@ -485,12 +485,12 @@ _GLOBAL(__copy_tofrom_user)
	.text
 /* the main body of the cacheline loop */
	COPY_16_BYTES_WITHEX(0)
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_WITHEX(1)
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_WITHEX(2)
	COPY_16_BYTES_WITHEX(3)
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_WITHEX(4)
	COPY_16_BYTES_WITHEX(5)
	COPY_16_BYTES_WITHEX(6)
@@ -544,12 +544,12 @@ _GLOBAL(__copy_tofrom_user)
  * 104f (if in read part) or 105f (if in write part), after updating r5
  */
	COPY_16_BYTES_EXCODE(0)
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_EXCODE(1)
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_EXCODE(2)
	COPY_16_BYTES_EXCODE(3)
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_EXCODE(4)
	COPY_16_BYTES_EXCODE(5)
	COPY_16_BYTES_EXCODE(6)

View file

@@ -521,7 +521,7 @@ katana_fixup_resources(struct pci_dev *dev)
 {
	u16	v16;
-	pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, L1_CACHE_LINE_SIZE>>2);
+	pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, L1_CACHE_BYTES>>2);
	pci_read_config_word(dev, PCI_COMMAND, &v16);
	v16 |= PCI_COMMAND_INVALIDATE | PCI_COMMAND_FAST_BACK;

View file

@@ -387,10 +387,10 @@ turn_on_mmu:
 #endif /* defined(CONFIG_PM) || defined(CONFIG_CPU_FREQ) */
	.section .data
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
 sleep_storage:
	.long 0
-	.balign	L1_CACHE_LINE_SIZE, 0
+	.balign	L1_CACHE_BYTES, 0
 #endif /* CONFIG_6xx */
	.section .text

View file

@@ -1304,7 +1304,7 @@ mv64x60_config_pci_params(struct pci_controller *hose,
	early_write_config_word(hose, 0, devfn, PCI_COMMAND, u16_val);
	/* Set latency timer, cache line size, clear BIST */
-	u16_val = (pi->latency_timer << 8) | (L1_CACHE_LINE_SIZE >> 2);
+	u16_val = (pi->latency_timer << 8) | (L1_CACHE_BYTES >> 2);
	early_write_config_word(hose, 0, devfn, PCI_CACHE_LINE_SIZE, u16_val);
	mv64x60_pci_exclude_bridge = save_exclude;

View file

@@ -9,21 +9,18 @@
 /* bytes per L1 cache line */
 #if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
-#define L1_CACHE_LINE_SIZE	16
-#define LG_L1_CACHE_LINE_SIZE	4
+#define L1_CACHE_SHIFT	4
 #define MAX_COPY_PREFETCH	1
 #elif defined(CONFIG_PPC64BRIDGE)
-#define L1_CACHE_LINE_SIZE	128
-#define LG_L1_CACHE_LINE_SIZE	7
+#define L1_CACHE_SHIFT	7
 #define MAX_COPY_PREFETCH	1
 #else
-#define L1_CACHE_LINE_SIZE	32
-#define LG_L1_CACHE_LINE_SIZE	5
+#define L1_CACHE_SHIFT	5
 #define MAX_COPY_PREFETCH	4
 #endif
-#define L1_CACHE_BYTES		L1_CACHE_LINE_SIZE
-#define L1_CACHE_SHIFT		LG_L1_CACHE_LINE_SIZE
+#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
 #define SMP_CACHE_BYTES		L1_CACHE_BYTES
 #define L1_CACHE_SHIFT_MAX	7	/* largest L1 which this arch supports */