/***********************license start***************
 * Copyright (c) 2003-2010  Cavium Inc. (support@cavium.com). All rights
 * reserved.
 *
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.

 *   * Neither the name of Cavium Inc. nor the names of
 *     its contributors may be used to endorse or promote products
 *     derived from this software without specific prior written
 *     permission.

 * This Software, including technical data, may be subject to U.S. export  control
 * laws, including the U.S. Export Administration Act and its  associated
 * regulations, and may be subject to export or import  regulations in other
 * countries.

 * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS"
 * AND WITH ALL FAULTS AND CAVIUM INC. MAKES NO PROMISES, REPRESENTATIONS OR
 * WARRANTIES, EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO
 * THE SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR
 * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM
 * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE,
 * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF
 * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR
 * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE  RISK ARISING OUT OF USE OR
 * PERFORMANCE OF THE SOFTWARE LIES WITH YOU.
 ***********************license end**************************************/




/**
 * @file
 *
 * cvmx-tlb supplies per core TLB access functions for simple executive
 * applications.
 *
 * <hr>$Revision: 41586 $<hr>
 */
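
/*
 * Illustrative usage sketch (not part of this file's API): the typical life
 * cycle of a run-time mapping using the functions defined below. The
 * virtual/physical addresses, the 2 MB size and the use of only TLB_VALID as
 * the flag word are assumptions made for the example; real callers pick
 * their own addresses and flag bits from cvmx-tlb.h.
 *
 *     int idx = cvmx_tlb_allocate_runtime_entry();
 *     if (idx >= 0) {
 *         // Map 2 MB at vaddr to paddr; the size must be a power of two
 *         cvmx_tlb_write_runtime_entry(idx, vaddr, paddr,
 *                                      2 * 1024 * 1024, TLB_VALID);
 *
 *         // The entry can be found again by address ...
 *         int found = cvmx_tlb_lookup(vaddr);   // == idx
 *
 *         // ... and invalidated when the mapping is no longer needed
 *         cvmx_tlb_free_runtime_entry(idx);
 *     }
 */
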
#include "cvmx.h"
#include "cvmx-tlb.h"
#include "cvmx-core.h"
#include <math.h>

extern __uint32_t __log2(__uint32_t);
//#define DEBUG

/**
 * @INTERNAL
 * issue the tlb read instruction
 */
static inline void __tlb_read(void){
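    /* Surround the TLBR with execution hazard barriers (EHB) so the CP0
       Index set up by the caller is seen by the read, and the EntryHi/
       EntryLo/PageMask results are visible to the code that follows */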
    CVMX_EHB;
    CVMX_TLBR;
    CVMX_EHB;
}

/**
 * @INTERNAL
 * issue the tlb write instruction
 */
static inline void __tlb_write(void){

    CVMX_EHB;
    CVMX_TLBWI;
    CVMX_EHB;
}

/**
 * @INTERNAL
 * issue the tlb probe instruction
 */
static inline int __tlb_probe(uint64_t hi){
    int index;
    CVMX_EHB;
    CVMX_MT_ENTRY_HIGH(hi);
    CVMX_TLBP;
    CVMX_EHB;

    CVMX_MF_TLB_INDEX(index);

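    /* On a miss TLBP sets the P bit (bit 31) of the Index register, so any
       negative value means "no matching entry"; normalize it to -1 */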
    if (index < 0) index = -1;

    return index;
}

/**
 * @INTERNAL
 * read a single tlb entry
 *
 * return 0: tlb entry is read
 *    -1: index is invalid
 */
static inline int __tlb_read_index(uint32_t tlbi){

    if (tlbi >= (uint32_t)cvmx_core_get_tlb_entries()) {
        return -1;
    }

    CVMX_MT_TLB_INDEX(tlbi);
    __tlb_read();

    return 0;
}

/**
 * @INTERNAL
 * write a single tlb entry
 *
 * return 0: tlb entry is written
 *    -1: index is invalid
 */
static inline int __tlb_write_index(uint32_t tlbi,
                                    uint64_t hi, uint64_t lo0,
                                    uint64_t lo1, uint64_t pagemask)
{

    if (tlbi >= (uint32_t)cvmx_core_get_tlb_entries()) {
        return -1;
    }

#ifdef DEBUG
    cvmx_dprintf("cvmx-tlb-dbg: "
                 "write TLB %d: hi %lx, lo0 %lx, lo1 %lx, pagemask %lx \n",
                 tlbi, hi, lo0, lo1, pagemask);
#endif

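    /* Stage the new entry in the CP0 registers, then commit it with an
       indexed TLB write */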
    CVMX_MT_TLB_INDEX(tlbi);
    CVMX_MT_ENTRY_HIGH(hi);
    CVMX_MT_ENTRY_LO_0(lo0);
    CVMX_MT_ENTRY_LO_1(lo1);
    CVMX_MT_PAGEMASK(pagemask);
    __tlb_write();

    return 0;
}

/**
 * @INTERNAL
 * Determine if a TLB entry is free to use
 */
static inline int __tlb_entry_is_free(uint32_t tlbi) {
    int ret = 0;
    uint64_t lo0 = 0, lo1 = 0;

    if (tlbi < (uint32_t)cvmx_core_get_tlb_entries()) {

        __tlb_read_index(tlbi);

        /* Unused entries have neither even nor odd page mapped */
        CVMX_MF_ENTRY_LO_0(lo0);
        CVMX_MF_ENTRY_LO_1(lo1);

        if (!(lo0 & TLB_VALID) && !(lo1 & TLB_VALID)) {
            ret = 1;
        }
    }

    return ret;
}


/**
 * @INTERNAL
 * dump a single tlb entry
 */
static inline void __tlb_dump_index(uint32_t tlbi)
{
    if (tlbi < (uint32_t)cvmx_core_get_tlb_entries()) {

        if (__tlb_entry_is_free(tlbi)) {
#ifdef DEBUG
            cvmx_dprintf("Index: %3d Free \n", tlbi);
#endif
        } else {
            uint64_t lo0, lo1, pgmask;
            uint64_t hi;
#ifdef DEBUG
            uint32_t c0, c1;
            int width = 13;
#endif

            __tlb_read_index(tlbi);

            CVMX_MF_ENTRY_HIGH(hi);
            CVMX_MF_ENTRY_LO_0(lo0);
            CVMX_MF_ENTRY_LO_1(lo1);
            CVMX_MF_PAGEMASK(pgmask);

#ifdef DEBUG
            c0 = (lo0 >> 3) & 7;
            c1 = (lo1 >> 3) & 7;

            cvmx_dprintf("va=%0*lx asid=%02x\n",
                         width, (hi & ~0x1fffUL), (uint32_t)(hi & 0xff));

            cvmx_dprintf("\t[pa=%0*lx c=%d d=%d v=%d g=%d] ",
                         width,
                         (lo0 << 6) & PAGE_MASK, c0,
                         (lo0 & 4) ? 1 : 0,
                         (lo0 & 2) ? 1 : 0,
                         (lo0 & 1) ? 1 : 0);
            cvmx_dprintf("[pa=%0*lx c=%d d=%d v=%d g=%d]\n",
                         width,
                         (lo1 << 6) & PAGE_MASK, c1,
                         (lo1 & 4) ? 1 : 0,
                         (lo1 & 2) ? 1 : 0,
                         (lo1 & 1) ? 1 : 0);

#endif
        }
    }
}

/**
 * @INTERNAL
 * read the CP0 Wired register, i.e. the index of the first non-wired
 * TLB entry
 */
static inline uint32_t __tlb_wired_index(void) {
    uint32_t tlbi;

    CVMX_MF_TLB_WIRED(tlbi);
    return tlbi;
}

/**
 *  Find a free entry that can be used for a shared memory mapping.
 *
 *  @return -1: no free entry found
 *  @return >=0: index of a free entry
 */
int cvmx_tlb_allocate_runtime_entry(void)
{
    uint32_t i;
    int ret = -1;

    for (i = __tlb_wired_index(); i < (uint32_t)cvmx_core_get_tlb_entries(); i++) {

        /* Check to make sure the index is free to use */
        if (__tlb_entry_is_free(i)) {
            /* Found it, return the index */
            ret = i;
            break;
        }
    }

    return ret;
}

/**
 *  Invalidate the TLB entry. Remove previous mapping if one was set up
 */
void cvmx_tlb_free_runtime_entry(uint32_t tlbi)
{
    /* Invalidate an unwired TLB entry */
    if ((tlbi < (uint32_t)cvmx_core_get_tlb_entries()) && (tlbi >= __tlb_wired_index())) {
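        /* Park EntryHi at the unmapped kseg0 base address and clear both
           EntryLo words so the entry is invalid and can never match */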
        __tlb_write_index(tlbi, 0xffffffff80000000ULL, 0, 0, 0);
    }
}


/**
 *  Program a single TLB entry to enable the provided vaddr to paddr mapping.
 *
 *  @param index  Index of the TLB entry
 *  @param vaddr  The virtual address for this mapping
 *  @param paddr  The physical address for this mapping
 *  @param size   Size of the mapping
 *  @param tlb_flags  Entry mapping flags
 */

void cvmx_tlb_write_entry(int index, uint64_t vaddr, uint64_t paddr,
                          uint64_t size, uint64_t tlb_flags) {
    uint64_t lo0, lo1, hi, pagemask;

    if (__is_power_of_two(size)) {
        if ((__log2(size) & 1) == 0) {
            /* log2(size) is even, so size is a single power-of-4 page and
               only one half of the dual-page entry is needed; figure out
               whether vaddr falls on the even or odd page of the pair */
            if ((vaddr >> __log2(size)) & 1) {
                lo0 = 0;
                lo1 = ((paddr >> 12) << 6) | tlb_flags;
                hi  = ((vaddr - size) >> 12) << 12;
            } else {
                lo0 = ((paddr >> 12) << 6) | tlb_flags;
                lo1 = 0;
                hi  = (vaddr >> 12) << 12;
            }
            pagemask = (size - 1) & (~1 << 11);
        } else {
            /* log2(size) is odd, so the mapping splits evenly across both
               pages of the pair, each covering size/2 */
            lo0 = ((paddr >> 12) << 6) | tlb_flags;
            lo1 = (((paddr + size / 2) >> 12) << 6) | tlb_flags;
            hi  = (vaddr >> 12) << 12;
            pagemask = ((size / 2) - 1) & (~1 << 11);
        }

        __tlb_write_index(index, hi, lo0, lo1, pagemask);
    }
}
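
/*
 * Worked example (illustrative; the addresses and the TLB_VALID-only flag
 * word are made up): cvmx_tlb_write_entry(i, 0x410000, 0x20000000, 0x2000,
 * TLB_VALID) maps an 8 KB region. log2(0x2000) = 13 is odd, so both halves
 * of the dual-page pair are used with 4 KB pages:
 *
 *     pagemask = (0x1000 - 1) & (~1 << 11) = 0          (4 KB page size)
 *     hi       = 0x410000                               (virtual base of the pair)
 *     lo0      = ((0x20000000 >> 12) << 6) | TLB_VALID  (maps 0x20000000)
 *     lo1      = ((0x20001000 >> 12) << 6) | TLB_VALID  (maps 0x20001000)
 *
 * A 4 KB request (even log2) would instead fill only the even or odd half,
 * selected by bit 12 of vaddr, and a size that is not a power of two is
 * silently ignored.
 */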


/**
 *  Program a single TLB entry to enable the provided vaddr to paddr mapping.
 *  This version adds a wired entry that should not be changed at run time.
 *
 *  @param vaddr  The virtual address for this mapping
 *  @param paddr  The physical address for this mapping
 *  @param size   Size of the mapping
 *  @param tlb_flags  Entry mapping flags
 *  @return 0: no free wired entry, nothing was added
 *          1: fixed entry added
 */
int cvmx_tlb_add_fixed_entry(uint64_t vaddr, uint64_t paddr, uint64_t size, uint64_t tlb_flags) {

    uint64_t index;
    int ret = 0;

    CVMX_MF_TLB_WIRED(index);

    /* Check to make sure the index is free to use */
    if (index < (uint32_t)cvmx_core_get_tlb_entries() && __tlb_entry_is_free(index)) {
        cvmx_tlb_write_entry(index, vaddr, paddr, size, tlb_flags);

        if (!__tlb_entry_is_free(index)) {
            /* Bump up the wired register */
            CVMX_MT_TLB_WIRED(index + 1);
            ret = 1;
        }
    }
    return ret;
}
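
/*
 * Illustrative use (the addresses, size, and TLB_VALID-only flag word are
 * assumptions for the example): a core can pin a mapping at start-up with a
 * wired entry, e.g.
 *
 *     if (cvmx_tlb_add_fixed_entry(0x410000, 0x20000000,
 *                                  0x2000, TLB_VALID) == 0)
 *         cvmx_dprintf("no free TLB entry for the fixed mapping\n");
 *
 * On success the CP0 Wired register is bumped so that the run-time
 * allocator skips this entry from then on.
 */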


/**
 *  Program a single TLB entry to enable the provided vaddr to paddr mapping.
 *  This version writes a runtime entry. It will check the index to make sure
 *  not to overwrite any fixed entries.
 *
 *  @param index  Index of the TLB entry
 *  @param vaddr  The virtual address for this mapping
 *  @param paddr  The physical address for this mapping
 *  @param size   Size of the mapping
 *  @param tlb_flags  Entry mapping flags
 */
void cvmx_tlb_write_runtime_entry(int index, uint64_t vaddr, uint64_t paddr,
                          uint64_t size, uint64_t tlb_flags)
{

    int wired_index;
    CVMX_MF_TLB_WIRED(wired_index);

    if (index >= wired_index) {
        cvmx_tlb_write_entry(index, vaddr, paddr, size, tlb_flags);
    }

}


/**
 * Find the TLB index of a given virtual address
 *
 *  @param vaddr  The virtual address to look up
 *  @return  -1:  not TLB mapped
 *           >=0: TLB index
 */
int cvmx_tlb_lookup(uint64_t vaddr) {
    uint64_t hi = (vaddr >> 13) << 13; /* We always use ASID 0 */

    return __tlb_probe(hi);
}
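
/*
 * Illustrative check (vaddr stands for any address of interest): a negative
 * result means the address is not covered by a TLB entry on this core.
 *
 *     if (cvmx_tlb_lookup(vaddr) < 0)
 *         cvmx_dprintf("address is not TLB mapped\n");
 */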

/**
 *  Debug routine to show all shared memory mappings
 */
void cvmx_tlb_dump_shared_mapping(void) {
    uint32_t tlbi;

    for (tlbi = __tlb_wired_index(); tlbi < (uint32_t)cvmx_core_get_tlb_entries(); tlbi++) {
        __tlb_dump_index(tlbi);
    }
}

/**
 *  Debug routine to show all TLB entries of this core
 *
 */
void cvmx_tlb_dump_all(void) {

    uint32_t tlbi;

    for (tlbi = 0; tlbi < (uint32_t)cvmx_core_get_tlb_entries(); tlbi++) {
        __tlb_dump_index(tlbi);
    }
}
