1210284Sjmallett/***********************license start*************** 2215990Sjmallett * Copyright (c) 2003-2010 Cavium Networks (support@cavium.com). All rights 3215990Sjmallett * reserved. 4210284Sjmallett * 5210284Sjmallett * 6215990Sjmallett * Redistribution and use in source and binary forms, with or without 7215990Sjmallett * modification, are permitted provided that the following conditions are 8215990Sjmallett * met: 9210284Sjmallett * 10215990Sjmallett * * Redistributions of source code must retain the above copyright 11215990Sjmallett * notice, this list of conditions and the following disclaimer. 12210284Sjmallett * 13215990Sjmallett * * Redistributions in binary form must reproduce the above 14215990Sjmallett * copyright notice, this list of conditions and the following 15215990Sjmallett * disclaimer in the documentation and/or other materials provided 16215990Sjmallett * with the distribution. 17215990Sjmallett 18215990Sjmallett * * Neither the name of Cavium Networks nor the names of 19215990Sjmallett * its contributors may be used to endorse or promote products 20215990Sjmallett * derived from this software without specific prior written 21215990Sjmallett * permission. 22215990Sjmallett 23215990Sjmallett * This Software, including technical data, may be subject to U.S. export control 24215990Sjmallett * laws, including the U.S. Export Administration Act and its associated 25215990Sjmallett * regulations, and may be subject to export or import regulations in other 26215990Sjmallett * countries. 
27215990Sjmallett 28215990Sjmallett * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS" 29215990Sjmallett * AND WITH ALL FAULTS AND CAVIUM NETWORKS MAKES NO PROMISES, REPRESENTATIONS OR 30215990Sjmallett * WARRANTIES, EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO 31215990Sjmallett * THE SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR 32215990Sjmallett * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM 33215990Sjmallett * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE, 34215990Sjmallett * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF 35215990Sjmallett * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR 36215990Sjmallett * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE RISK ARISING OUT OF USE OR 37215990Sjmallett * PERFORMANCE OF THE SOFTWARE LIES WITH YOU. 38210284Sjmallett ***********************license end**************************************/ 39210284Sjmallett 40210284Sjmallett 41210284Sjmallett 42210284Sjmallett 43210284Sjmallett 44210284Sjmallett 45215990Sjmallett 46210284Sjmallett/** 47210284Sjmallett * @file 48210284Sjmallett * 49210284Sjmallett * Module to support operations on core such as TLB config, etc. 50210284Sjmallett * 51215990Sjmallett * <hr>$Revision: 49862 $<hr> 52210284Sjmallett * 53210284Sjmallett */ 54210284Sjmallett 55215990Sjmallett#ifdef CVMX_BUILD_FOR_LINUX_KERNEL 56215990Sjmallett#include <linux/module.h> 57215990Sjmallett#include <asm/octeon/cvmx.h> 58215990Sjmallett#include <asm/octeon/cvmx-core.h> 59215990Sjmallett#else 60210284Sjmallett#include "cvmx-config.h" 61210284Sjmallett#include "cvmx.h" 62210284Sjmallett#include "cvmx-core.h" 63215990Sjmallett#endif 64210284Sjmallett 65210284Sjmallett 66210284Sjmallett/** 67210284Sjmallett * Adds a wired TLB entry, and returns the index of the entry added. 
 * Parameters are written to TLB registers without further processing.
 *
 * @param hi        HI register value
 * @param lo0       lo0 register value
 * @param lo1       lo1 register value
 * @param page_mask pagemask register value
 *
 * @return Success: TLB index used (0-31 Octeon, 0-63 Octeon+, or 0-127
 *         Octeon2). Failure: -1
 */
int cvmx_core_add_wired_tlb_entry(uint64_t hi, uint64_t lo0, uint64_t lo1, cvmx_tlb_pagemask_t page_mask)
{
    uint32_t index;

    /* The CP0 Wired register is used as a counter of wired entries here:
       the next free wired slot is exactly at this index (entries below it
       were wired by previous calls to this function). */
    CVMX_MF_TLB_WIRED(index);
    if (index >= (unsigned int)cvmx_core_get_tlb_entries())
    {
        /* Every TLB entry is already wired — nothing left to allocate. */
        return(-1);
    }
    /* Load the entry registers with the caller-supplied raw values ... */
    CVMX_MT_ENTRY_HIGH(hi);
    CVMX_MT_ENTRY_LO_0(lo0);
    CVMX_MT_ENTRY_LO_1(lo1);
    CVMX_MT_PAGEMASK(page_mask);
    /* ... point Index at the free slot, and bump Wired so the new entry
       is excluded from random replacement. */
    CVMX_MT_TLB_INDEX(index);
    CVMX_MT_TLB_WIRED(index + 1);
    CVMX_EHB;       /* hazard barrier: CP0 writes must retire before TLBWI */
    CVMX_TLBWI;     /* write the indexed TLB entry */
    CVMX_EHB;       /* hazard barrier: TLB write visible before returning */
    return(index);
}



/**
 * Adds a fixed (wired) TLB mapping.  Returns TLB index used or -1 on error.
 * This is a wrapper around cvmx_core_add_wired_tlb_entry()
 *
 * @param vaddr      Virtual address to map
 * @param page0_addr page 0 physical address, with low 3 bits representing the DIRTY, VALID, and GLOBAL bits
 * @param page1_addr page1 physical address, with low 3 bits representing the DIRTY, VALID, and GLOBAL bits
 * @param page_mask  page mask.
109210284Sjmallett * 110210284Sjmallett * @return Success: TLB index used (0-31) 111210284Sjmallett * Failure: -1 112210284Sjmallett */ 113210284Sjmallettint cvmx_core_add_fixed_tlb_mapping_bits(uint64_t vaddr, uint64_t page0_addr, uint64_t page1_addr, cvmx_tlb_pagemask_t page_mask) 114210284Sjmallett{ 115210284Sjmallett 116210284Sjmallett if ((vaddr & (page_mask | 0x7ff)) 117210284Sjmallett || ((page0_addr & ~0x7ULL) & ((page_mask | 0x7ff) >> 1)) 118210284Sjmallett || ((page1_addr & ~0x7ULL) & ((page_mask | 0x7ff) >> 1))) 119210284Sjmallett { 120210284Sjmallett cvmx_dprintf("Error adding tlb mapping: invalid address alignment at vaddr: 0x%llx\n", (unsigned long long)vaddr); 121210284Sjmallett return(-1); 122210284Sjmallett } 123210284Sjmallett 124210284Sjmallett 125210284Sjmallett return(cvmx_core_add_wired_tlb_entry(vaddr, 126210284Sjmallett (page0_addr >> 6) | (page0_addr & 0x7), 127210284Sjmallett (page1_addr >> 6) | (page1_addr & 0x7), 128210284Sjmallett page_mask)); 129210284Sjmallett 130210284Sjmallett} 131210284Sjmallett/** 132210284Sjmallett * Adds a fixed (wired) TLB mapping. Returns TLB index used or -1 on error. 133210284Sjmallett * Assumes both pages are valid. Use cvmx_core_add_fixed_tlb_mapping_bits for more control. 134210284Sjmallett * This is a wrapper around cvmx_core_add_wired_tlb_entry() 135210284Sjmallett * 136210284Sjmallett * @param vaddr Virtual address to map 137210284Sjmallett * @param page0_addr page 0 physical address 138210284Sjmallett * @param page1_addr page1 physical address 139210284Sjmallett * @param page_mask page mask. 
140210284Sjmallett * 141210284Sjmallett * @return Success: TLB index used (0-31) 142210284Sjmallett * Failure: -1 143210284Sjmallett */ 144210284Sjmallettint cvmx_core_add_fixed_tlb_mapping(uint64_t vaddr, uint64_t page0_addr, uint64_t page1_addr, cvmx_tlb_pagemask_t page_mask) 145210284Sjmallett{ 146210284Sjmallett 147210284Sjmallett return(cvmx_core_add_fixed_tlb_mapping_bits(vaddr, page0_addr | TLB_DIRTY | TLB_VALID | TLB_GLOBAL, page1_addr | TLB_DIRTY | TLB_VALID | TLB_GLOBAL, page_mask)); 148210284Sjmallett 149210284Sjmallett} 150215990Sjmallett 151215990Sjmallett/** 152215990Sjmallett * Return number of TLB entries. 153215990Sjmallett */ 154215990Sjmallettint cvmx_core_get_tlb_entries(void) 155215990Sjmallett{ 156215990Sjmallett if (OCTEON_IS_MODEL(OCTEON_CN3XXX)) 157215990Sjmallett return 32; 158215990Sjmallett else if (OCTEON_IS_MODEL(OCTEON_CN5XXX)) 159215990Sjmallett return 64; 160215990Sjmallett else 161215990Sjmallett return 128; 162215990Sjmallett} 163