ref: 4be5a05374d01cf615ce2a1d749e714e707eaf5b
parent: 9114cb02ef1f1745a735d9a6a8e8f995f96161e2
author: cinap_lenrek <cinap_lenrek@felloff.net>
date: Sun May 19 12:54:50 EDT 2019
bcm, bcm64: fix cache operations for dma and emmc

always clean AND invalidate caches before dma read, never just
invalidate, as the buffer might not be aligned to cache lines...

we have to invalidate caches again *AFTER* the dma read has
completed: the processor can speculatively bring data into the
cache while the dma is in flight.
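for context, a minimal sketch of the cache discipline this change adopts
for a device-to-memory transfer. cachedwbinvse and cachedinvse are the
cache-maintenance primitives used in the patch; dmakick and dmadone are
hypothetical stand-ins for starting the transfer and waiting for
completion, not functions from the source:

	/*
	 * minimal sketch, assuming a device-to-memory (DmaD2M) read into buf.
	 * dmakick() and dmadone() are illustrative placeholders.
	 */
	void
	dmaread(void *buf, ulong len)
	{
		/*
		 * clean AND invalidate before the read: if buf is not
		 * cache-line aligned, a plain invalidate could discard
		 * dirty data sharing a line with the head or tail of buf.
		 */
		cachedwbinvse(buf, len);

		dmakick(buf, len);	/* hypothetical: start the dma */
		dmadone();		/* hypothetical: wait for completion */

		/*
		 * invalidate again after completion: the cpu may have
		 * speculatively refilled lines covering buf while the
		 * dma was in flight, leaving stale data in the cache.
		 */
		cachedinvse(buf, len);
	}

a single invalidate before the transfer would suffice only if buf were
cache-line aligned at both ends and the cpu never touched those lines
mid-transfer; neither holds here, hence the clean-before and
invalidate-after pair.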
--- a/sys/src/9/bcm/dma.c
+++ b/sys/src/9/bcm/dma.c
@@ -170,7 +170,7 @@
ti = 0;
switch(dir){
case DmaD2M:
- cachedinvse(dst, len);
+ cachedwbinvse(dst, len);
ti = Srcdreq | Destinc;
cb->sourcead = dmaioaddr(src);
cb->destad = dmaaddr(dst);
@@ -183,7 +183,7 @@
break;
case DmaM2M:
cachedwbse(src, len);
- cachedinvse(dst, len);
+ cachedwbinvse(dst, len);
ti = Srcinc | Destinc;
cb->sourcead = dmaaddr(src);
cb->destad = dmaaddr(dst);
--- a/sys/src/9/bcm/emmc.c
+++ b/sys/src/9/bcm/emmc.c
@@ -398,6 +398,8 @@
}
if(i)
WR(Interrupt, i);
+ if(!write)
+ cachedinvse(buf, len);
poperror();
okay(0);
}
--