1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
32 33 34 35
36
37 package ti.sysbios.family.arm.a9;
38
39 import xdc.rov.ViewInfo;
40
41 import xdc.runtime.Assert;
42
43 import ti.sysbios.family.arm.gic.Hwi;
44
45 /*!
46 * ======== Cache ========
47 * ARM Cache Module
48 *
49 * This module manages the data and instruction caches on Cortex A9
50 * processors.
51 *
52 * It provides a list of functions that perform cache operations. The
53 * functions operate on a per cache line except for the 'All' functions
54 * which operate on the entire cache specified. Any Address that is not
55 * aligned to a cache line gets rounded down to the address of
56 * the nearest cache line.
57 *
58 * The L1 data and program caches as well as the L2 cache are enabled
59 * by default early during the startup sequence (prior to any
60 * Module_startup()s).
61 *
62 * Data caching requires the MMU to be enabled and the cacheable
63 * attribute of the section/page descriptor for a corresponding
64 * memory region to be enabled.
65 * Program caching does not require the MMU to be enabled and therefore
66 * occurs when the L1 program cache is enabled.
67 *
68 * Note: See the {@link ti.sysbios.family.arm.a8.Mmu} module for
69 * information about the MMU.
70 *
71 * Here's an example showing how to enable L2 interrupt and register an
72 * interrupt callback function:
73 *
74 * *.cfg config script:
75 * @p(code)
76 * ...
77 *
78 * var Cache = xdc.useModule('ti.sysbios.family.arm.a9.Cache');
79 * Cache.enableL2Interrupt = true;
80 * Cache.l2InterruptFunc = '&cacheIntHandler';
81 * @p
82 *
83 * C source file:
84 * @p(code)
85 * ...
86 *
87 * Void cacheIntHandler(UArg arg0)
88 * {
89 * System_printf("Interrupt Mask: %u \n", arg0);
90 * }
91 * @p
92 *
93 * Notes:
94 * @p(blist)
95 * - See the {@link http://infocenter.arm.com/help/topic/com.arm.doc.ddi0406c/index.html ARM v7AR Architecture Reference Manual} and {@link http://infocenter.arm.com/help/topic/com.arm.doc.subset.primecell.system/index.html#pl310 ARM PL310 Cache Controller Reference Manual} for more info.
96 * @p
97 *
98 * Unconstrained Functions
99 * All functions
100 *
101 * @p(html)
102 * <h3> Calling Context </h3>
103 * <table border="1" cellpadding="3">
104 * <colgroup span="1"></colgroup> <colgroup span="5" align="center">
105 * </colgroup>
106 *
107 * <tr><th> Function </th><th> Hwi </th><th> Swi </th>
108 * <th> Task </th><th> Main </th><th> Startup </th></tr>
109 * <!-- -->
110 * <tr><td> {@link #configureL2EventCounter} </td><td> Y </td>
111 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
112 * <tr><td> {@link #disable} </td><td> Y </td><td> Y </td>
113 * <td> Y </td><td> Y </td><td> Y </td></tr>
114 * <tr><td> {@link #disableL1Prefetch} </td><td> Y </td>
115 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
116 * <tr><td> {@link #disableL2EventCounters} </td><td> Y </td>
117 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
118 * <tr><td> {@link #enable} </td><td> Y </td><td> Y </td>
119 * <td> Y </td><td> Y </td><td> Y </td></tr>
120 * <tr><td> {@link #enableL1Prefetch} </td><td> Y </td>
121 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
122 * <tr><td> {@link #enableL2EventCounters} </td><td> Y </td>
123 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
124 * <tr><td> {@link #resetL2EventCounter} </td><td> Y </td>
125 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
126 * <tr><td> {@link #getL2EventCount} </td><td> Y </td>
127 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
128 * <tr><td> {@link #getL2PrefetchControl} </td><td> Y </td>
129 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
130 * <tr><td> {@link #setL2PrefetchControl} </td><td> Y </td>
131 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
132 * <tr><td> {@link #getL2AuxControlReg} </td><td> Y </td>
133 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
134 * <tr><td> {@link #setL2AuxControlReg} </td><td> Y </td>
135 * <td> Y </td><td> Y </td><td> Y </td><td> Y </td></tr>
136 * <tr><td> {@link #inv} </td><td> Y </td><td> Y </td>
137 * <td> Y </td><td> Y </td><td> Y </td></tr>
138 * <tr><td> {@link #invL1dAll} </td><td> Y </td><td> Y </td>
139 * <td> Y </td><td> Y </td><td> Y </td></tr>
140 * <tr><td> {@link #invL1pAll} </td><td> Y </td><td> Y </td>
141 * <td> Y </td><td> Y </td><td> Y </td></tr>
142 * <tr><td> {@link #invL2All} </td><td> Y </td><td> Y </td>
143 * <td> Y </td><td> Y </td><td> Y </td></tr>
144 * <tr><td> {@link #wait} </td><td> Y </td><td> Y </td>
145 * <td> Y </td><td> Y </td><td> Y </td></tr>
146 * <tr><td> {@link #wb} </td><td> Y </td><td> Y </td>
147 * <td> Y </td><td> Y </td><td> Y </td></tr>
148 * <tr><td> {@link #wbInv} </td><td> Y </td><td> Y </td>
149 * <td> Y </td><td> Y </td><td> Y </td></tr>
150 * <tr><td> {@link #wbInvL1dAll} </td><td> Y </td><td> Y </td>
151 * <td> Y </td><td> Y </td><td> Y </td></tr>
152 * <tr><td> {@link #wbL1dAll} </td><td> Y </td><td> Y </td>
153 * <td> Y </td><td> Y </td><td> Y </td></tr>
154 * <tr><td> {@link #lock} </td><td> Y </td><td> Y </td>
155 * <td> Y </td><td> Y </td><td> Y </td></tr>
156 * <tr><td> {@link #unlock} </td><td> Y </td><td> Y </td>
157 * <td> Y </td><td> Y </td><td> Y </td></tr>
158 * <tr><td colspan="6"> Definitions: <br />
159 * <ul>
160 * <li> <b>Hwi</b>: API is callable from a Hwi thread. </li>
161 * <li> <b>Swi</b>: API is callable from a Swi thread. </li>
162 * <li> <b>Task</b>: API is callable from a Task thread. </li>
163 * <li> <b>Main</b>: API is callable during any of these phases: </li>
164 * <ul>
165 * <li> In your module startup after this module is started
166 * (e.g. Cache_Module_startupDone() returns TRUE). </li>
167 * <li> During xdc.runtime.Startup.lastFxns. </li>
168 * <li> During main().</li>
169 * <li> During BIOS.startupFxns.</li>
170 * </ul>
171 * <li> <b>Startup</b>: API is callable during any of these phases:</li>
172 * <ul>
173 * <li> During xdc.runtime.Startup.firstFxns.</li>
174 * <li> In your module startup before this module is started
175 * (e.g. Cache_Module_startupDone() returns FALSE).</li>
176 * </ul>
177 * </ul>
178 * </td></tr>
179 *
180 * </table>
181 * @p
182 */
183
184 module Cache inherits ti.sysbios.interfaces.ICache
185 {
186 /*! List of bitmasks for event counter interrupt generation types */
187 enum L2CounterIntType {
188 L2CounterIntType_DISABLED, /*! Disabled */
189 L2CounterIntType_INCREMENT, /*! Interrupt generated on counter
190 increment */
191 L2CounterIntType_OVERFLOW /*! Interrupt generated on counter
192 overflow */
193 };
194
195 /*! List of bitmasks for event sources to the L2 Cache event counter */
196 enum L2EventSource {
197 L2EventSource_DISABLED = 0, /*! Counter Disabled. */
198 L2EventSource_CO = 4, /*! Eviction of a line from the L2 cache */
199 L2EventSource_DRHIT = 8, /*! Data read hit in the L2 cache. */
200 L2EventSource_DRREQ = 12, /*! Data read lookup to the L2 cache.
201 Results in a hit or miss. */
202 L2EventSource_DWHIT = 16, /*! Data write hit in the L2 cache. */
203 L2EventSource_DWREQ = 20, /*! Data write lookup to the L2 cache.
204 Results in a hit or miss. */
205 L2EventSource_DWTREQ = 24, /*! Data write lookup to L2 cache with
206 Write-Through attribute. Results in
207 a hit or miss. */
208 L2EventSource_IRHIT = 28, /*! Instruction read hit in the L2 cache */
209 L2EventSource_IRREQ = 32, /*! Instruction read lookup to the L2
210 cache. Results in a hit or miss. */
211 L2EventSource_WA = 36, /*! Allocation into L2 cache caused by a
212 write, with Write-Allocate attribute.
213 Result of a cache miss. */
214 L2EventSource_IPFALLOC = 40, /*! Allocation of a prefetch generated by
215 L2 cache controller into L2 cache. */
216 L2EventSource_EPFHIT = 44, /*! Prefetch hint hits in the L2 cache. */
217 L2EventSource_EPFALLOC = 48, /*! Prefetch hint allocated into L2
218 cache. */
219 L2EventSource_SRRCVD = 52, /*! Speculative read received by slave
220 port S0/1. */
221 L2EventSource_SRCONF = 56, /*! Speculative read confirmed in slave
222 port S0/1. */
223 L2EventSource_EPFRCVD = 60 /*! Prefetch hint received by slave port
224 S0/1. */
225 };
226
227 /*!
228 * ======== sizeL1dCacheLine ========
229 * Size of L1 data cache Line
230 */
231 const UInt sizeL1dCacheLine = 32;
232
233 /*!
234 * ======== sizeL1pCacheLine ========
235 * Size of L1 program cache Line
236 */
237 const UInt sizeL1pCacheLine = 32;
238
239 /*!
240 * ======== sizeL2CacheLine ========
241 * Size of L2 cache Line
242 */
243 const UInt sizeL2CacheLine = 32;
244
245 /*! Cache interrupt handler function type definition. */
246 typedef Void (*CacheIntHandlerFuncPtr)(UArg);
247
248 /*!
249 * ======== ModView ========
250 * @_nodoc
251 */
252 metaonly struct CacheInfoView {
253 String cache;
254 SizeT cacheSize;
255 SizeT lineSize;
256 UInt ways;
257 SizeT waySize;
258 };
259
260 /*!
261 * ======== WayInfoView ========
262 * @_nodoc
263 */
264 metaonly struct WayInfoView {
265 UInt number;
266 Bool locked;
267 Ptr baseAddress;
268 };
269
270 /*!
271 * ======== ErrorStatusView ========
272 * @_nodoc
273 */
274 metaonly struct ErrorStatusView {
275 String DecodeError;
276 String DataRAMReadError;
277 String TagRAMReadError;
278 String DataRAMWriteError;
279 String TagRAMWriteError;
280 String DataRAMParityError;
281 String TagRAMParityError;
282 String SlaveError;
283 };
284
285 /*!
286 * ======== rovViewInfo ========
287 * @_nodoc
288 */
289 @Facet
290 metaonly config ViewInfo.Instance rovViewInfo =
291 ViewInfo.create({
292 viewMap: [
293 ['Cache Info', { type: ViewInfo.MODULE_DATA,
294 viewInitFxn: 'viewInitCacheInfo',
295 structName: 'CacheInfoView'}],
296 ['L2 Way Info', { type: ViewInfo.MODULE_DATA,
297 viewInitFxn: 'viewInitWays',
298 structName: 'WayInfoView'}],
299 ['L2 Error Status', { type: ViewInfo.MODULE_DATA,
300 viewInitFxn: 'viewInitErrorStatus',
301 structName: 'ErrorStatusView'}]
302 ]
303 });
304
305 /*!
306 * ======== A_badBlockLength ========
307 * Asserted in Cache_lock (see {@link #lock Cache_lock} for more info).
308 */
309 config Assert.Id A_badBlockLength = {
310 msg: "A_badBlockLength: Block length too large. Must be <= L2 way size."
311 };
312
313 /*!
314 * ======== A_blockCrossesPage ========
315 * Asserted in Cache_lock (see {@link #lock Cache_lock} for more info).
316 */
317 config Assert.Id A_blockCrossesPage = {
318 msg: "A_blockCrossesPage: Memory block crosses L2 way page boundary."
319 };
320
321 /*!
322 * ======== A_badInvCallWithL2En ========
323 * Assert raised when Cache invalidate called with L2 enabled
324 */
325 config xdc.runtime.Assert.Id A_badInvCallWithL2En = {
326 msg: "A_badInvCallWithL2En: Cannot call cache Invalidate with L2 cache enabled."
327 };
328
329 /*!
330 * ======== A_noNonSecureInterruptAccess ========
331 * Assert raised when enableL2Interrupt is set to true, but interrupt
332 * control through non-secure access is not allowed.
333 */
334 config xdc.runtime.Assert.Id A_noNonSecureInterruptAccess = {
335 msg: "A_noNonSecureInterruptAccess: Non-secure interrupt access control disabled."
336 };
337
338 /*!
339 * ======== A_noNonSecureLockdown ========
340 * Assert raised when Cache_lock() called but non-secure lockdown is not
341 * enabled.
342 */
343 config xdc.runtime.Assert.Id A_noNonSecureLockdown = {
344 msg: "A_noNonSecureLockdown: Non-secure lockdown is not enabled."
345 };
346
347 /*!
348 * ======== A_invalidL2CounterId ========
349 * Assert raised when invalid counter id passed to L2 cache event counter
350 * APIs.
351 */
352 config xdc.runtime.Assert.Id A_invalidL2CounterId = {
353 msg: "A_invalidL2CounterId: Invalid L2 cache event counter Id passed."
354 };
355
356 /*!
357 * ======== A_badL2CacheOperation ========
358 * Assert raised when attempting to perform a L2 Cache maintenance
359 * operation with L2 configured as SRAM memory.
360 */
361 config xdc.runtime.Assert.Id A_badL2CacheOperation = {
362 msg: "A_badL2CacheOperation: Cannot perform L2 Cache maintenance when L2 configured as SRAM."
363 };
364
365 /*!
366 * ======== A_l1PrefetchApiNotSupported ========
367 * Assert raised when an unsupported API is called.
368 */
369 config xdc.runtime.Assert.Id A_l1PrefetchApiNotSupported = {
370 msg: "A_l1PrefetchApiNotSupported: Cache_enableL1Prefetch()/Cache_disableL1Prefetch() APIs not supported on this device."
371 };
372
373 /*!
374 * ======== enableCache ========
375 * Enable L1 and L2 data and program caches.
376 *
377 * To enable a subset of the caches, set this parameter
378 * to 'false' and call Cache_enable() within main, passing it only
379 * the {@link Cache#Type Cache_Type(s)} to be enabled.
380 *
381 * Data caching requires the MMU and the memory section/page
382 * descriptor cacheable attribute to be enabled.
383 */
384 config Bool enableCache = true;
385
386 /*!
387 * ======== unlockL2Cache ========
388 * Unlock all L2 cache ways at startup, default is true.
389 *
390 * Ordinarily, the L2 cache ways should all be unlocked at
391 * system startup.
392 *
393 * During development using CCS, if the application exits
394 * while L2 cache ways are locked, the soft-reset function
395 * DOES NOT unlock the L2 cache ways. To overcome this problem,
396 * the L2 cache ways are unlocked during Cache module startup.
397 *
398 * If for any reason this behavior is undesirable, setting this
399 * config parameter to false will disable the automatic unlocking
400 * of the L2 cache ways.
401 */
402 config Bool unlockL2Cache = true;
403
404 /*!
405 * ======== branchPredictionEnabled ========
406 * Enable Branch Prediction at startup, default is true.
407 *
408 * This flag controls whether Branch Prediction should be automatically
409 * enabled or disabled during system startup.
410 *
411 * @a(NOTE)
412 * Upon reset, the A9's Program Flow Prediction (Branch Prediction)
413 * feature is disabled.
414 */
415 config Bool branchPredictionEnabled = true;
416
417 /*!
418 * ======== configureL2Sram ========
419 * Configure L2 as cache or SRAM memory
420 *
421 * By default, when the device resets, it will be set
422 * as cache. If this parameter is set to "true",
423 * L2 cache will be configured as SRAM memory during
424 * startup.
425 *
426 * @a(NOTE)
427 * This config param is only supported on AM437X.
428 */
429 config Bool configureL2Sram = false;
430
431 /*!
432 * @_nodoc
433 * ======== controlModuleReg ========
434 * Base address of Control Module registers
435 */
436 config Ptr controlModuleReg = null;
437
438 /*!
439 * ======== enableL2Interrupt ========
440 * This flag controls whether L2 Cache Controller interrupt is enabled.
441 * (default is true)
442 *
443 * If this flag is enabled, this cache module will enable all L2 cache
444 * controller interrupts and register a L2 cache interrupt handler that
445 * will call the l2InterruptFunc if a callback function is registered.
446 * The callback function is passed the contents of the Masked Interrupt
447 * Status Register.
448 *
449 * The L2 cache interrupt handler will detect any L2 cache errors
450 * and the errors will be visible in this module's ROV view. The handler
451 * also acknowledges the interrupt.
452 */
453 config Bool enableL2Interrupt = true;
454
455 /*!
456 * ======== l2DataPrefetchEnable ========
457 * Enable L2 Data prefetching
458 */
459 config Bool l2DataPrefetchEnable = true;
460
461 /*!
462 * ======== l2InstructionPrefetchEnable ========
463 * Enable L2 Instruction prefetching
464 */
465 config Bool l2InstructionPrefetchEnable = true;
466
467 /*!
468 * ======== l2PrefetchDropEnable ========
469 * Enable L2 prefetch drop
470 *
471 * @a(NOTE)
472 * This config param is only supported on AM437X.
473 */
474 config Bool l2PrefetchDropEnable = false;
475
476 /*!
477 * ======== l2DoubleLinefillEnable ========
478 * Enable L2 Double Linefill
479 *
480 * @a(NOTE)
481 * This config param is only supported on AM437X.
482 */
483 config Bool l2DoubleLinefillEnable = true;
484
485 /*!
486 * ======== l2WrapDoubleLinefillEnable ========
487 * Enable L2 Double Linefill on Wrapping reads
488 *
489 * @a(NOTE)
490 * This config param is only supported on AM437X.
491 */
492 config Bool l2WrapDoubleLinefillEnable = false;
493
494 /*!
495 * ======== l2IncrDoubleLinefillEnable ========
496 * Enable L2 Double Linefill on Incrementing reads
497 *
498 * @a(NOTE)
499 * This config param is only supported on AM437X.
500 */
501 config Bool l2IncrDoubleLinefillEnable = false;
502
503 /*!
504 * ======== l2PrefetchOffset ========
505 * Prefetch offset for subsequent cache line prefetches
506 *
507 * L2C-310 supports offset values of 0-7, 15, 23 and 31 only.
508 *
509 * @a(NOTE)
510 * This field has affect only if L2 data/instruction
511 * prefetching is enabled.
512 */
513 config UInt8 l2PrefetchOffset = 0;
514
515 /*!
516 * ======== l2InterruptFunc ========
517 * Function called when a L2 cache Interrupt occurs
518 *
519 * The function is passed the contents of the Masked Interrupt Status
520 * Register as an argument.
521 *
522 * @p(code)
523 * Masked Interrupt Status Register bit assignments
524 * ----------------------------------------------------------------------
525 * | Bits | Field | Description |
526 * ----------------------------------------------------------------------
527 * | 31:9 | Reserved | Read as 0 |
528 * ----------------------------------------------------------------------
529 * | 8 | Decode error received on master port from L3 | |
530 * ------------------------------------------------------ |
531 * | 7 | Slave error received on master port from L3 | A 1 indicates |
532 * ------------------------------------------------------ the status of |
533 * | 6 | Error on L2 data RAM read | the input line|
534 * ------------------------------------------------------ triggering |
535 * | 5 | Error on L2 tag RAM read | an interrupt. |
536 * ------------------------------------------------------ |
537 * | 4 | Error on L2 data RAM write | A 0 indicates |
538 * ------------------------------------------------------ either no |
539 * | 3 | Error on L2 tag RAM write | interrupt has |
540 * ------------------------------------------------------ been generated|
541 * | 2 | Parity error on L2 data RAM read | or the |
542 * ------------------------------------------------------ interrupt is |
543 * | 1 | Parity error on L2 tag RAM read | masked. |
544 * ------------------------------------------------------ |
545 * | 0 | Event counter 0/1 overflow/increment | |
546 * ----------------------------------------------------------------------
547 * @p
548 */
549 config CacheIntHandlerFuncPtr l2InterruptFunc = null;
550
551 /*!
552 * ======== l2InterruptMask ========
553 * L2 cache Interrupt mask
554 *
555 * This mask controls which L2 cache Interrupts are enabled when
556 * {@link #enableL2Interrupt} is true. By default, all interrupts
557 * are enabled.
558 */
559 config UInt32 l2InterruptMask = 0x1FF;
560
561 /*!
562 * PL310 L2 Cache controller registers. Symbol "Cache_l2ControllerRegs"
563 * is a physical device.
564 */
565 struct L2ControllerRegs {
566 UInt32 CACHEID; /*! 0x000 Cache Id Register */
567 UInt32 CACHETYPE; /*! 0x004 Cache Type Register */
568 UInt32 hole0[62]; /*! 0x008-0x0FC */
569 UInt32 CONTROL; /*! 0x100 Control Register */
570 UInt32 AUXCONTROL; /*! 0x104 Auxiliary Control Register */
571 UInt32 TAGRAMCONTROL; /*! 0x108 Tag RAM Latency Control Register */
572 UInt32 DATARAMCONTROL; /*! 0x10C Data RAM Latency Control Register */
573 UInt32 hole1[60]; /*! 0x110-0x1FC */
574 UInt32 EVCOUNTERCTRL; /*! 0x200 Event Counter Control Register */
575 UInt32 EVCOUNTER1CFG; /*! 0x204 Event Counter1 Config Register */
576 UInt32 EVCOUNTER0CFG; /*! 0x208 Event Counter0 Config Register */
577 UInt32 EVCOUNTER1; /*! 0x20C Event Counter1 Value Register */
578 UInt32 EVCOUNTER0; /*! 0x210 Event Counter0 Value Register */
579 UInt32 INTMASK; /*! 0x214 Interrupt Mask Register */
580 UInt32 INTMASKSTATUS; /*! 0x218 Interrupt Mask Status Register */
581 UInt32 INTRAWSTATUS; /*! 0x21C Interrupt Raw Status Register */
582 UInt32 INTCLEAR; /*! 0x220 Interrupt Clear Register */
583 UInt32 hole2[323]; /*! 0x224-0x72C */
584 UInt32 CACHESYNC; /*! 0x730 Cache Sync Register */
585 UInt32 hole3[15]; /*! 0x734-0x76C */
586 UInt32 INVPA; /*! 0x770 Invalidate By Physical Address */
587 UInt32 hole4[2]; /*! 0x774-0x778 */
588 UInt32 INVWAY; /*! 0x77C Invalidate By Way Number */
589 UInt32 hole5[12]; /*! 0x780-0x7AC */
590 UInt32 CLEANPA; /*! 0x7B0 Clean By Physical Address */
591 UInt32 hole6[1]; /*! 0x7B4 */
592 UInt32 CLEANINDEX; /*! 0x7B8 Clean by Set or Way */
593 UInt32 CLEANWAY; /*! 0x7BC Clean by Way */
594 UInt32 hole7[12]; /*! 0x7C0-0x7EC */
595 UInt32 CLEANINVPA; /*! 0x7F0 Clean & Invalidate by Phy Address */
596 UInt32 hole8[1]; /*! 0x7F4 */
597 UInt32 CLEANINVINDEX; /*! 0x7F8 Clean & Invalidate By Set or Way */
598 UInt32 CLEANINVWAY; /*! 0x7FC Clean & Invalidate By Way */
599 UInt32 hole9[64]; /*! 0x800-0x8FC */
600 UInt32 LOCKDOWN[16]; /*! 0x900-0x93C D & I Cache Lockdown regs */
601 UInt32 hole10[4]; /*! 0x940-0x94C */
602 UInt32 LOCKLINEEN; /*! 0x950 Lock Line Enable */
603 UInt32 UNLOCKWAY; /*! 0x954 Unlock Way */
604 UInt32 hole11[170]; /*! 0x958-0xBFC */
605 UInt32 ADDRFILTERSTART; /*! 0xC00 Address Filtering Start */
606 UInt32 ADDRFILTEREND; /*! 0xC04 Address Filtering End */
607 UInt32 hole12[206]; /*! 0xC08-0xF3C */
608 UInt32 DEBUGCTRL; /*! 0xF40 Debug Register */
609 UInt32 hole13[7]; /*! 0xF44-0xF5C */
610 UInt32 PREFETCHCTRL; /*! 0xF60 Prefetch Control Register */
611 UInt32 hole14[7]; /*! 0xF64-0xF7C */
612 UInt32 POWERCTRL; /*! 0xF80 Power Control Register */
613 };
614
615 extern volatile L2ControllerRegs l2ControllerRegs;
616
617 /*! @_nodoc
618 * ======== getEnabled ========
619 * Get the 'type' bitmask of cache(s) enabled.
620 */
621 Bits16 getEnabled();
622
623 /*!
624 * ======== wait ========
625 * Backend for `{@link ti.sysbios.hal.Cache#wait()}`
626 *
627 * Implementation for this API uses Data Synchronization Barrier (DSB)
628 * ARM instruction.
629 *
630 * @see ti.sysbios.hal.Cache#wait
631 */
632 override Void wait();
633
634 /*!
635 * ======== disable ========
636 * Backend for `{@link ti.sysbios.hal.Cache#disable()}`
637 *
638 * @a(Note)
639 * This function disables interrupts while performing L1 and L2 cache
640 * maintenance operations. It can affect interrupt latency and should
641 * not be called unless absolutely necessary.
642 *
643 * @see ti.sysbios.hal.Cache#disable
644 */
645 override Void disable(Bits16 type);
646
647 /*!
648 * ======== wb ========
649 * Backend for `{@link ti.sysbios.hal.Cache#wb()}`
650 *
651 * @a(Note)
652 * This function ignores the 'type' argument and does a write-back
653 * on both L1 data and L2 caches.
654 *
655 * @see ti.sysbios.hal.Cache#wb
656 */
657 override Void wb(Ptr blockPtr, SizeT byteCnt, Bits16 type, Bool wait);
658
659 /*!
660 * ======== wbInv ========
661 * Backend for `{@link ti.sysbios.hal.Cache#wbInv()}`
662 *
663 * @a(Note)
664 * This function ignores the 'type' argument and does a write-back
665 * invalidate on both L1 data and L2 caches.
666 *
667 * @see ti.sysbios.hal.Cache#wbInv
668 */
669 override Void wbInv(Ptr blockPtr, SizeT byteCnt, Bits16 type, Bool wait);
670
671 /*!
672 * ======== invL1dAll ========
673 * Invalidate all of L1 data cache.
674 *
675 * This function should be used with caution. In general, the
676 * L1 data cache may contain some stack variable or valid data
677 * that should not be invalidated. This function should be used
678 * only when all contents of L1 data cache are unwanted.
679 */
680 Void invL1dAll();
681
682 /*!
683 * ======== invL1pAll ========
684 * Invalidate all of L1 program cache.
685 */
686 Void invL1pAll();
687
688 /*!
689 * ======== invL2All ========
690 * Invalidate entire L2 unified cache.
691 *
692 * @a(Note)
693 * This function should only be called with the L2 cache disabled.
694 * If called with cache enabled, it will generate an assertion failure.
695 */
696 Void invL2All();
697
698 /*!
699 * ======== lock ========
700 * Loads and locks a memory block into the L2 cache.
701 *
702 * A block of memory is loaded into the L2 cache and
703 * a corresponding L2 cache "way" is locked.
704 *
705 * The memory block is loaded into cache one L2 cache line at a time.
706 *
707 * The returned key is a bitmask of the L2 cache "way"
708 * used to lock the memory block. This key should be passed in
709 * a subsequent call to {@link #unlock Cache_unlock()} if the memory
710 * block is to be unlocked.
711 *
712 * If the key returned is zero, then the lock operation failed
713 * due to insufficient cache "ways" remaining to perform the operation.
714 * The locking algorithm requires at least two unlocked cache ways:
715 * one for the memory block, and one for the locking code itself.
716 *
717 * The A9 external L2 cache can be an 8 or 16 way cache. Locking a cache
718 * way consumes 1/8 or 1/16 of the total L2 cache, regardless of the
719 * actual memory block size. For instance, if the size of L2 cache is
720 * 256K bytes and the L2 is configured as a 16 way cache, locking ANY size
721 * memory block into a way will tie up 16K bytes of L2 cache.
722 *
723 * The byteCnt argument must be less than or equal to an L2 "way"
724 * size. Locking memory blocks larger than a way page size requires
725 * calling this API multiple times. An assert is generated if this rule
726 * is violated.
727 *
728 * The memory block must not cross an L2 "way" page boundary.
729 * Locking memory blocks that cross way page boundaries requires
730 * calling this API multiple times. An assert is generated if this rule
731 * is violated.
732 *
733 * Except for the normal L1 instruction cache behavior
734 * during code execution, the L1 instruction cache is
735 * unaffected by this API.
736 * The L1 data cache will be temporarily polluted by the contents
737 * of the referenced memory block.
738 *
739 * @a(NOTE)
740 * Interrupts are disabled for the entire time the memory block
741 * is being loaded into cache. For this reason, use of this API
742 * is probably best at system initialization time
743 * (i.e., within 'main()').
744 *
745 * @param(blockPtr) start address of range to be locked
746 * @param(byteCnt) number of bytes to be locked
747 * @b(returns) key = bitmask of L2 cache "way" used
748 */
749 UInt lock(Ptr blockPtr, SizeT byteCnt);
750
751 /*!
752 * ======== unlock ========
753 * Unlocks an L2 cache way.
754 *
755 * Unlocks the L2 cache "way" locked by {@link #lock Cache_lock()}.
756 *
757 * @a(NOTE)
758 * Multiple L2 cache "ways" can be unlocked simultaneously by "or-ing"
759 * together the bitmasks returned from several invocations of Cache_lock().
760 *
761 * @param(key) Key returned by Cache_lock()
762 */
763 Void unlock(UInt key);
764
765 /*!
766 * ======== enableBP ========
767 * Enable Branch Prediction
768 *
769 * Calling this API will enable branch prediction.
770 *
771 * @a(NOTE)
772 * Upon reset, the A9's Program Flow Prediction (Branch Prediction)
773 * feature is disabled.
774 */
775 Void enableBP();
776
777 /*!
778 * ======== disableBP ========
779 * Disable Branch Prediction
780 *
781 * Calling this API will disable branch prediction.
782 *
783 * @a(NOTE)
784 * Upon reset, the A9's Program Flow Prediction (Branch Prediction)
785 * feature is disabled.
786 */
787 Void disableBP();
788
789 /*!
790 * ======== enableL2EventCounters ========
791 * Enables the L2 Cache event counters
792 */
793 Void enableL2EventCounters();
794
795 /*!
796 * ======== disableL2EventCounters ========
797 * Disables the L2 Cache event counters
798 */
799 Void disableL2EventCounters();
800
801 /*!
802 * ======== resetL2EventCounter ========
803 * Reset the specified L2 Cache event counter
804 *
805 * @param(counterId) Event counter Id (0/1)
806 */
807 Void resetL2EventCounter(UInt counterId);
808
809 /*!
810 * ======== configureL2EventCounter ========
811 * Sets the event source and interrupt generation type for the specified
812 * L2 Cache event counter
813 *
814 * @param(counterId) Event counter Id (0/1)
815 * @param(eventSource) Counter event source
816 * @param(interruptType) Bit mask of event counter interrupt generation
817 * type
818 *
819 * @a(NOTE)
820 * This API disables the event counter before updating the event counter
821 * config registers.
822 */
823 Void configureL2EventCounter(UInt counterId, L2EventSource eventSource,
824 L2CounterIntType interruptType);
825
826 /*!
827 * ======== getL2EventCount ========
828 * Return counter register value for the specified L2 Cache event counter.
829 *
830 * @param(counterId) Event counter Id (0/1)
831 * @b(returns) Event count
832 */
833 UInt32 getL2EventCount(UInt counterId);
834
835 /*!
836 * ======== enableL1Prefetch ========
837 * Enable L1 data prefetching
838 *
839 * @a(NOTE)
840 * This API is only supported on AM437X.
841 */
842 Void enableL1Prefetch();
843
844 /*!
845 * ======== disableL1Prefetch ========
846 * Disable L1 data prefetching
847 *
848 * @a(NOTE)
849 * This API is only supported on AM437X.
850 */
851 Void disableL1Prefetch();
852
853 /*!
854 * ======== getL2AuxControlReg ========
855 * Get current L2 Aux Control register contents
856 *
857 * Refer {@link http://infocenter.arm.com/help/topic/com.arm.doc.subset.primecell.system/index.html#pl310 ARM PL310 Cache Controller Reference Manual}
858 * for a description of the Auxiliary Control Register.
859 */
860 Bits32 getL2AuxControlReg();
861
862 /*!
863 * ======== setL2AuxControlReg ========
864 * Set L2 Aux Control register
865 *
866 * Refer {@link http://infocenter.arm.com/help/topic/com.arm.doc.subset.primecell.system/index.html#pl310 ARM PL310 Cache Controller Reference Manual}
867 * for a description of the Auxiliary Control Register.
868 */
869 Void setL2AuxControlReg(Bits32 arg);
870
871 /*!
872 * ======== getL2PrefetchControl ========
873 * Get current L2 prefetch control register contents
874 *
875 * Refer {@link http://infocenter.arm.com/help/topic/com.arm.doc.subset.primecell.system/index.html#pl310 ARM PL310 Cache Controller Reference Manual}
876 * for a description of the Prefetch Control Register.
877 */
878 Bits32 getL2PrefetchControl();
879
880 /*!
881 * ======== setL2PrefetchControl ========
882 * Set L2 prefetch control register
883 *
884 * Refer {@link http://infocenter.arm.com/help/topic/com.arm.doc.subset.primecell.system/index.html#pl310 ARM PL310 Cache Controller Reference Manual}
885 * for a description of the Prefetch Control Register.
886 */
887 Void setL2PrefetchControl(UInt32 regVal);
888
889 internal:
890
891 /*!
892 * ======== isOMAP4 ========
893 */
894 config Bool isOMAP4 = false;
895
896 /*!
897 * ======== l2CacheControllerAddress ========
898 * PL310 L2 cache controller Register base address
899 */
900 metaonly config Ptr l2CacheControllerAddress;
901
902 /*!
903 * ======== startup ========
904 * startup function to enable cache early during climb-up
905 */
906 Void startup();
907
908 /*!
909 * ======== disableL1d ========
910 * Disable L1 data cache
911 */
912 Void disableL1d();
913
914 /*!
915 * ======== disableL1p ========
916 * Disable L1 Program cache
917 *
918 * This function performs an invalidate all of L1 program cache
919 * after it disables the cache.
920 */
921 Void disableL1p();
922
923 /*!
924 * ======== disableL2 ========
925 * Disable L2 Unified Cache
926 */
927 Void disableL2();
928
929 /*!
930 * ======== disableWbInvL2 ========
931 * Disable, Write-Back and Invalidate L2 Unified Cache
932 *
933 * This function first cleans and invalidates the L2 cache and then
934 * disables it.
935 */
936 Void disableWbInvL2();
937
938 /*!
939 * ======== enableL1d ========
940 * Enable L1 data cache.
941 */
942 Void enableL1d();
943
944 /*!
945 * ======== enableL1p ========
946 * Enable L1 program cache.
947 *
948 * This function performs an invalidate all of L1 program cache
949 * before it enables the cache.
950 */
951 Void enableL1p();
952
953 /*!
954 * ======== enableL2 ========
955 * Enable L2 Unified Cache
956 */
957 Void enableL2();
958
959 /*!
960 * ======== initL2Sram ========
961 * Configure L2 as SRAM
962 */
963 Void initL2Sram();
964
965 /*!
966 * ======== sync ========
967 * Sync L2 cache operation
968 */
969 Void sync();
970
971 /*!
972 * ======== debugWriteL2 ========
973 * Write L2 debug register
974 */
975 Void debugWriteL2(UInt32 regVal);
976
977 /*!
978 * ======== invL1d ========
979 * Invalidates range in L1 data cache.
980 */
981 Void invL1d(Ptr blockPtr, SizeT byteCnt, Bool wait);
982
983 /*!
984 * ======== invL1p ========
985 * Invalidates range in L1 program cache.
986 */
987 Void invL1p(Ptr blockPtr, SizeT byteCnt, Bool wait);
988
989 /*!
990 * ======== invL2 ========
991 * Invalidates range in L2 unified cache.
992 */
993 Void invL2(Ptr blockPtr, SizeT byteCnt, Bool wait);
994
995 /*!
996 * ======== wbL1d ========
997 * Write back range in L1 data cache.
998 */
999 Void wbL1d(Ptr blockPtr, SizeT byteCnt, Bool wait);
1000
1001 /*!
1002 * ======== wbL2 ========
1003 * Write back range in L2 unified cache.
1004 */
1005 Void wbL2(Ptr blockPtr, SizeT byteCnt, Bool wait);
1006
1007 /*!
1008 * ======== wbInvL1d ========
1009 * Write back and invalidate range in L1 data cache.
1010 */
1011 Void wbInvL1d(Ptr blockPtr, SizeT byteCnt, Bool wait);
1012
1013 /*!
1014 * ======== wbInvL2 ========
1015 * Write back and invalidate range in L2 unified cache.
1016 */
1017 Void wbInvL2(Ptr blockPtr, SizeT byteCnt, Bool wait);
1018
1019 /*!
1020 * ======== invL1dAllInternal ========
1021 * Invalidate all of L1 data cache.
1022 *
1023 * This function does not save any registers to avoid data
1024 * accesses and cache line buffer fills. This function is
1025 * meant for internal use by other cache functions only.
1026 *
1027 * Corrupted Registers: r4, r5, r7, r9, r10 & r11
1028 *
1029 * Cache_invL1dAll() is a wrapper function that saves these
1030 * registers and should be called instead of this function.
1031 */
1032 Void invL1dAllInternal();
1033
1034 /*!
1035 * ======== wbL1dAll ========
1036 * Write back entire L1 data cache.
1037 */
1038 Void wbL1dAll();
1039
1040 /*!
1041 * ======== wbL2All ========
1042 * Write back entire L2 unified cache.
1043 */
1044 Void wbL2All();
1045
1046 /*!
1047 * ======== wbInvL1dAll ========
1048 * Write back and Invalidate entire L1 data cache.
1049 */
1050 Void wbInvL1dAll();
1051
1052 /*!
1053 * ======== wbInvL2All ========
1054 * Write back and Invalidate entire L2 unified cache.
1055 */
1056 Void wbInvL2All();
1057
1058 /*!
1059 * ======== getLockdownReg ========
1060 * Return current L2 Cache lockdown register value
1061 */
1062 Bits32 getLockdownReg();
1063
1064 /*!
1065 * ======== setLockdownReg ========
1066 * Set L2 Cache lockdown register
1067 */
1068 Void setLockdownReg(Bits32 wayMask);
1069
1070 /*!
1071 * ======== wayLoadLock ========
1072 * Lock a block of memory into the L2 cache way specified by 'wayNum'.
1073 */
1074 Void wayLoadLock(Ptr blockPtr, SizeT byteCnt, UInt wayNum);
1075
1076 /*!
1077 * ======== setL1Prefetch ========
1078 * Enable and disable L1 data prefetching
1079 *
1080 * Pass argument 0x1 to enable prefetching and 0x0 to disable.
1081 *
1082 * @a(NOTE)
1083 * This API is only supported on AM437X.
1084 */
1085 Void setL1Prefetch(UInt32 regVal);
1086
1087 /*!
1088 * ======== getCacheLevelInfo ========
1089 * reads and returns Cache Size Id Register of corresponding Cache level
1090 *
1091 * level values
1092 * 0 = L1D
1093 * 1 = L1P
1094 */
1095 Bits32 getCacheLevelInfo(UInt level);
1096
1097 /*!
1098 * ======== l2InterruptHandler ========
1099 * L2 cache interrupt handler function.
1100 */
1101 Void l2InterruptHandler(UArg arg);
1102
1103 struct Module_State {
1104 Bits32 l1dInfo;
1105 Bits32 l1pInfo;
1106 Bits32 l2Info;
1107 SizeT l2WaySize;
1108 UInt l2NumWays;
1109 UInt l2NumSets;
1110 Bits32 lockRegister;
1111 Hwi.Handle l2CacheHwi;
1112 UInt pl310RTLRelease;
1113 Bits32 l2ErrorStatus;
1114 Ptr baseAddresses[16];
1115 }
1116 }