//===-- tsan_interface_java.cc --------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//

#include "tsan_interface_java.h"
#include "tsan_rtl.h"
#include "tsan_mutex.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_placement_new.h"

using namespace __tsan;  // NOLINT

namespace __tsan {

// Fixed address at which the Java heap shadow is mmap'ed (see JavaContext).
const uptr kHeapShadow = 0x300000000000ull;
// Required alignment of Java heap objects; there is one BlockDesc shadow
// slot per kHeapAlignment bytes of Java heap.
const uptr kHeapAlignment = 8;

// Per-slot shadow descriptor for the Java heap.  A slot whose 'begin' flag
// is set marks the first kHeapAlignment-sized granule of a live Java object
// and owns the list of SyncVar's (per-address sync objects) for addresses
// inside that object.
struct BlockDesc {
  bool begin;     // true iff this slot is the start of a live block
  Mutex mtx;      // protects 'head'
  SyncVar *head;  // singly-linked list of sync objects for this block

  BlockDesc()
      : mtx(MutexTypeJavaMBlock, StatMtxJavaMBlock)
      , head() {
    // Deliberately reads 'begin' before it is ever written: the shadow
    // comes from MmapFixedNoReserve, so fresh (or properly destroyed)
    // slots are expected to hold false here.  NOTE(review): this relies
    // on the mmap'ed shadow pages being zero-initialized — confirm.
    CHECK_EQ(begin, false);
    begin = true;
  }

  // Destroys the block and frees every SyncVar attached to it.
  ~BlockDesc() {
    CHECK_EQ(begin, true);
    begin = false;
    ThreadState *thr = cur_thread();
    SyncVar *s = head;
    while (s) {
      SyncVar *s1 = s->next;
      StatInc(thr, StatSyncDestroyed);
      // Lock/Unlock pair: wait out any thread currently holding the
      // sync object's mutex before freeing it.
      s->mtx.Lock();
      s->mtx.Unlock();
      thr->mset.Remove(s->GetId());
      DestroyAndFree(s);
      s = s1;
    }
  }
};

// Global description of the Java heap registered via __tsan_java_init():
// its bounds plus the shadow array of BlockDesc's (one per granule).
struct JavaContext {
  const uptr heap_begin;
  const uptr heap_size;
  BlockDesc *heap_shadow;

  JavaContext(jptr heap_begin, jptr heap_size)
      : heap_begin(heap_begin)
      , heap_size(heap_size) {
    // One BlockDesc per kHeapAlignment bytes of heap.
    uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
    heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
    // MmapFixedNoReserve returns the requested address on success;
    // anything else is a fatal mapping failure.
    if ((uptr)heap_shadow != kHeapShadow) {
      Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
      Die();
    }
  }
};

// RAII guard for every __tsan_java_* entry point: ensures the runtime is
// initialized, records function entry/exit for stack traces, and bumps the
// in_rtl counter so that interceptors know we are inside the runtime.
class ScopedJavaFunc {
 public:
  ScopedJavaFunc(ThreadState *thr, uptr pc)
      : thr_(thr) {
    Initialize(thr_);
    FuncEntry(thr, pc);
    // Java entry points must not be called re-entrantly from the RTL.
    CHECK_EQ(thr_->in_rtl, 0);
    thr_->in_rtl++;
  }

  ~ScopedJavaFunc() {
    thr_->in_rtl--;
    CHECK_EQ(thr_->in_rtl, 0);
    FuncExit(thr_);
    // FIXME(dvyukov): process pending signals.
  }

 private:
  ThreadState *thr_;
};

// Backing storage for the singleton JavaContext; constructed in-place by
// __tsan_java_init() via placement new (avoids dynamic allocation and
// static-constructor ordering issues).
static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
static JavaContext *jctx;

// Maps a Java heap address to its shadow slot.
// Precondition: jctx != 0 and addr lies within the registered heap.
static BlockDesc *getblock(uptr addr) {
  uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
  return &jctx->heap_shadow[i];
}

// Inverse of getblock(): maps a shadow slot back to its heap address.
// Used only for debug output (hence USED to survive when DPrintf is a no-op).
static uptr USED getmem(BlockDesc *b) {
  uptr i = b - jctx->heap_shadow;
  uptr p = jctx->heap_begin + i * kHeapAlignment;
  CHECK_GE(p, jctx->heap_begin);
  CHECK_LT(p, jctx->heap_begin + jctx->heap_size);
  return p;
}

// Scans the shadow backwards from addr's slot to find the slot that begins
// the enclosing block.  The CHECK fires (fatal) if addr does not belong to
// any allocated block, i.e. no 'begin' slot is found before the shadow start.
static BlockDesc *getblockbegin(uptr addr) {
  for (BlockDesc *b = getblock(addr);; b--) {
    CHECK_GE(b, jctx->heap_shadow);
    if (b->begin)
      return b;
  }
  // Unreachable: the loop above only exits via 'return b' or the fatal CHECK.
  return 0;
}

// Looks up (and optionally creates) the SyncVar for a Java heap address.
// Returns 0 for non-Java addresses or when the heap is not registered.
// On success the SyncVar is returned with its mutex held (write or read
// lock per write_lock); the caller is responsible for releasing it.
SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
                     bool write_lock, bool create) {
  if (jctx == 0 || addr < jctx->heap_begin
      || addr >= jctx->heap_begin + jctx->heap_size)
    return 0;
  BlockDesc *b = getblockbegin(addr);
  DPrintf("#%d: GetJavaSync %p->%p\n", thr->tid, addr, b);
  Lock l(&b->mtx);  // guards the block's SyncVar list
  SyncVar *s = b->head;
  for (; s; s = s->next) {
    if (s->addr == addr) {
      DPrintf("#%d: found existing sync for %p\n", thr->tid, addr);
      break;
    }
  }
  if (s == 0 && create) {
    DPrintf("#%d: creating new sync for %p\n", thr->tid, addr);
    s = CTX()->synctab.Create(thr, pc, addr);
    // Prepend to the block's list; list order is not significant.
    s->next = b->head;
    b->head = s;
  }
  if (s) {
    if (write_lock)
      s->mtx.Lock();
    else
      s->mtx.ReadLock();
  }
  return s;
}

// Java sync objects are never removed individually; they die with their
// block in ~BlockDesc (triggered by __tsan_java_free/__tsan_java_move).
SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
  // We do not destroy Java mutexes other than in __tsan_java_free().
  return 0;
}

}  // namespace __tsan

// Common prologue for the public entry points below: fetches the current
// thread, captures the caller PC for reports, and installs the RAII guard.
#define SCOPED_JAVA_FUNC(func) \
  ThreadState *thr = cur_thread(); \
  const uptr caller_pc = GET_CALLER_PC(); \
  const uptr pc = (uptr)&func; \
  (void)pc; \
  ScopedJavaFunc scoped(thr, caller_pc); \
  /**/

// Registers the Java heap [heap_begin, heap_begin + heap_size) with the
// runtime.  Must be called exactly once, with aligned, non-empty bounds.
void __tsan_java_init(jptr heap_begin, jptr heap_size) {
  SCOPED_JAVA_FUNC(__tsan_java_init);
  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
  CHECK_EQ(jctx, 0);
  CHECK_GT(heap_begin, 0);
  CHECK_GT(heap_size, 0);
  CHECK_EQ(heap_begin % kHeapAlignment, 0);
  CHECK_EQ(heap_size % kHeapAlignment, 0);
  // Also rejects address-space wraparound of heap_begin + heap_size.
  CHECK_LT(heap_begin, heap_begin + heap_size);
  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}

// Finalizes the runtime on VM shutdown; returns the process exit status
// from Finalize() (nonzero if races were reported, per RTL convention).
int __tsan_java_fini() {
  SCOPED_JAVA_FUNC(__tsan_java_fini);
  DPrintf("#%d: java_fini()\n", thr->tid);
  CHECK_NE(jctx, 0);
  // FIXME(dvyukov): this does not call atexit() callbacks.
  int status = Finalize(thr);
  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
  return status;
}

// Marks [ptr, ptr + size) as a newly allocated Java object by constructing
// a BlockDesc in the shadow slot of its first granule.
void __tsan_java_alloc(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_alloc);
  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *b = getblock(ptr);
  new(b) BlockDesc();
}

// Frees every block that begins inside [ptr, ptr + size) (the region may
// contain several objects); each destroyed block releases its SyncVar's.
void __tsan_java_free(jptr ptr, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_free);
  DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(ptr % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(ptr, jctx->heap_begin);
  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);

  BlockDesc *beg = getblock(ptr);
  BlockDesc *end = getblock(ptr + size);  // one-past-the-end slot
  for (BlockDesc *b = beg; b != end; b++) {
    if (b->begin)
      b->~BlockDesc();
  }
}

// Moves objects in [src, src + size) to dst (e.g. compacting GC): relocates
// the shadow BlockDesc's, rewrites the addresses of their SyncVar's, and
// copies the access shadow.  Source and destination must not overlap.
void __tsan_java_move(jptr src, jptr dst, jptr size) {
  SCOPED_JAVA_FUNC(__tsan_java_move);
  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
  CHECK_NE(jctx, 0);
  CHECK_NE(size, 0);
  CHECK_EQ(src % kHeapAlignment, 0);
  CHECK_EQ(dst % kHeapAlignment, 0);
  CHECK_EQ(size % kHeapAlignment, 0);
  CHECK_GE(src, jctx->heap_begin);
  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
  CHECK_GE(dst, jctx->heap_begin);
  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
  CHECK(dst >= src + size || src >= dst + size);  // no overlap

  // Assuming it's not running concurrently with threads that do
  // memory accesses and mutex operations (stop-the-world phase).
  { // NOLINT
    // Relocate block descriptors and retarget their sync objects.
    BlockDesc *s = getblock(src);
    BlockDesc *d = getblock(dst);
    BlockDesc *send = getblock(src + size);
    for (; s != send; s++, d++) {
      CHECK_EQ(d->begin, false);  // destination slots must be free
      if (s->begin) {
        DPrintf("#%d: moving block %p->%p\n", thr->tid, getmem(s), getmem(d));
        new(d) BlockDesc;
        // Steal the SyncVar list (placement-new above reset d->head).
        d->head = s->head;
        for (SyncVar *sync = d->head; sync; sync = sync->next) {
          uptr newaddr = sync->addr - src + dst;
          DPrintf("#%d: moving sync %p->%p\n", thr->tid, sync->addr, newaddr);
          sync->addr = newaddr;
        }
        // Detach before destruction so ~BlockDesc does not free the
        // sync objects that now belong to the destination slot.
        s->head = 0;
        s->~BlockDesc();
      }
    }
  }

  { // NOLINT
    // Move the per-granule access shadow, clearing the source.
    u64 *s = (u64*)MemToShadow(src);
    u64 *d = (u64*)MemToShadow(dst);
    u64 *send = (u64*)MemToShadow(src + size);
    for (; s != send; s++, d++) {
      *d = *s;
      *s = 0;
    }
  }
}

// Java monitorenter: exclusive lock of the monitor at addr.
void __tsan_java_mutex_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexLock(thr, pc, addr);
}

// Java monitorexit: releases the exclusive lock at addr.
void __tsan_java_mutex_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexUnlock(thr, pc, addr);
}

// Shared (read) lock of the monitor at addr.
void __tsan_java_mutex_read_lock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadLock(thr, pc, addr);
}

// Releases a shared (read) lock of the monitor at addr.
void __tsan_java_mutex_read_unlock(jptr addr) {
  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
  CHECK_NE(jctx, 0);
  CHECK_GE(addr, jctx->heap_begin);
  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);

  MutexReadUnlock(thr, pc, addr);
}