ref: c881e33e8ed668f4d5a2102c113e3edb1ea80d44
parent: d667607c26626219d8dfc1871f0a4d8ef92c240e
author: cinap_lenrek <[email protected]>
date: Sun May 19 12:54:50 EDT 2019
bcm, bcm64: fix cache operations for dma and emmc

always clean AND invalidate caches before a dma read, never just
invalidate, as the buffer might not be aligned to cache lines.

we have to invalidate caches again *AFTER* the dma read has
completed: the processor can speculatively bring data into the
cache while the dma is in flight.
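a minimal sketch of the resulting sequence for a device-to-memory
transfer, assuming hypothetical dmastart()/dmawait() helpers in place
of the real controller programming (the cached*se() calls are the same
cache maintenance primitives changed below). the clean before the
transfer matters because an unaligned buffer can share its first and
last cache lines with unrelated dirty data; a plain invalidate would
discard those dirty bytes, and a later eviction of such a line could
clobber data the dma had already deposited:

	/* device -> memory dma read; sketch only */
	void
	dmaread(void *dst, ulong len)
	{
		/*
		 * clean AND invalidate: dst may share its first/last
		 * cache line with live dirty data, so write that back
		 * now instead of letting a later eviction overwrite
		 * the freshly dma'd buffer.
		 */
		cachedwbinvse(dst, len);

		dmastart(dst, len);	/* hypothetical: program the controller */
		dmawait();		/* hypothetical: wait for completion */

		/*
		 * invalidate again: the cpu may have speculatively
		 * fetched lines of dst while the transfer was in
		 * flight, leaving stale data in the cache.
		 */
		cachedinvse(dst, len);
	}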
--- a/sys/src/9/bcm/dma.c
+++ b/sys/src/9/bcm/dma.c
@@ -170,7 +170,7 @@
ti = 0;
switch(dir){
case DmaD2M:
- cachedinvse(dst, len);
+ cachedwbinvse(dst, len);
ti = Srcdreq | Destinc;
cb->sourcead = dmaioaddr(src);
cb->destad = dmaaddr(dst);
@@ -183,7 +183,7 @@
break;
case DmaM2M:
cachedwbse(src, len);
- cachedinvse(dst, len);
+ cachedwbinvse(dst, len);
ti = Srcinc | Destinc;
cb->sourcead = dmaaddr(src);
cb->destad = dmaaddr(dst);
--- a/sys/src/9/bcm/emmc.c
+++ b/sys/src/9/bcm/emmc.c
@@ -398,6 +398,8 @@
}
if(i)
WR(Interrupt, i);
+ if(!write)
+ cachedinvse(buf, len);
poperror();
okay(0);
}