
Lines Matching full:u64

51   u64 mtx : 1;  // must be first
52 u64 lst : 44;
53 u64 stk : 31; // on word boundary
54 u64 tid : kTidBits;
55 u64 siz : 128 - 1 - 31 - 44 - kTidBits; // 39
57 u64 raw[2];
61 raw[1] |= (u64)siz << ((1 + 44 + 31 + kTidBits) % 64);
62 raw[1] |= (u64)tid << ((1 + 44 + 31) % 64);
63 raw[0] |= (u64)stk << (1 + 44);
64 raw[1] |= (u64)stk >> (64 - 44 - 1);
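
The shifts on lines 61-64 mirror the bit-field layout on lines 51-55: mtx (1 bit) + lst (44) + stk (31) + tid (kTidBits) + siz fill two 64-bit words, and the "// 39" comment on line 55 implies kTidBits is 13 (128 - 1 - 31 - 44 - 13 = 39). Because stk starts at bit offset 45 and is 31 bits wide, it straddles the 64-bit boundary, which is why lines 63-64 split it between raw[0] and raw[1]. Below is a minimal standalone sketch of that packing, assuming kTidBits = 13; the mtx and lst packing statements are reconstructed for completeness and are not among the matched lines.

#include <cstdint>
#include <cstdio>

typedef uint64_t u64;
const unsigned kTidBits = 13;  // implied by the "// 39" comment on line 55

int main() {
  u64 mtx = 1;           // 1 bit,   offset 0
  u64 lst = 0x123;       // 44 bits, offset 1
  u64 stk = 0x7fffffff;  // 31 bits, offset 45 -- straddles the word boundary
  u64 tid = 5;           // kTidBits bits, offset 76
  u64 siz = 4096;        // 39 bits, offset 76 + kTidBits

  u64 raw[2] = {0, 0};
  raw[0] |= mtx;                   // reconstructed, not a matched line
  raw[0] |= lst << 1;              // reconstructed, not a matched line
  raw[0] |= stk << (1 + 44);                         // low 19 bits of stk
  raw[1] |= stk >> (64 - 44 - 1);                    // high 12 bits of stk
  raw[1] |= tid << ((1 + 44 + 31) % 64);             // offset 76 -> bit 12
  raw[1] |= siz << ((1 + 44 + 31 + kTidBits) % 64);  // offset 89 -> bit 25

  printf("raw[0]=%016llx raw[1]=%016llx\n",
         (unsigned long long)raw[0], (unsigned long long)raw[1]);
  return 0;
}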
89 u64 x = (u64)v ^ (u64)lst;
99 u64 x = (u64)lst ^ (u64)nxt;
108 u64 x = (u64)lst;
138 u64 v1, u64 v2);
148 FastState(u64 tid, u64 epoch) {
156 explicit FastState(u64 x)
160 u64 raw() const {
164 u64 tid() const {
165 u64 res = (x_ & ~kIgnoreBit) >> kTidShift;
169 u64 TidWithIgnore() const {
170 u64 res = x_ >> kTidShift;
174 u64 epoch() const {
175 u64 res = (x_ << (kTidBits + 1)) >> (64 - kClkBits);
180 u64 old_epoch = epoch();
204 u64 GetTracePos() const {
207 const u64 mask = (1ull << (kTracePartSizeBits + hs + 1)) - 1;
215 static const u64 kIgnoreBit = 1ull << 63;
216 static const u64 kFreedBit = 1ull << 63;
217 u64 x_;
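
Lines 148-175 show how FastState packs a thread id and an epoch into a single u64: the ignore bit (line 215) is bit 63, the tid sits in the next kTidBits bits, and the epoch sits below that, so tid() masks off the ignore bit and shifts right, while epoch() shifts the ignore bit and tid out of the top and then right-aligns the remaining kClkBits. A minimal sketch of that round trip, assuming kTidBits = 13 and kClkBits = 42 and that the epoch field sits directly below the tid; the real constants live elsewhere in the rtl sources.

#include <cstdint>
#include <cstdio>

typedef uint64_t u64;
const int kTidBits = 13;                     // assumed
const int kClkBits = 42;                     // assumed
const u64 kIgnoreBit = 1ull << 63;           // line 215
const int kTidShift = 64 - kTidBits - 1;     // 50
const int kClkShift = kTidShift - kClkBits;  // 8, assumed epoch position

int main() {
  u64 tid = 7, epoch = 12345;
  u64 x = (tid << kTidShift) | (epoch << kClkShift);  // pack, as in line 148

  u64 got_tid   = (x & ~kIgnoreBit) >> kTidShift;            // line 165
  u64 got_epoch = (x << (kTidBits + 1)) >> (64 - kClkBits);  // line 175

  printf("tid=%llu epoch=%llu\n",
         (unsigned long long)got_tid, (unsigned long long)got_epoch);
  return 0;
}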
230 explicit Shadow(u64 x)
239 void SetAddr0AndSizeLog(u64 addr0, unsigned kAccessSizeLog) {
271 u64 shifted_xor = (s1.x_ ^ s2.x_) >> kTidShift;
277 u64 masked_xor = (s1.x_ ^ s2.x_) & 31;
284 u64 diff = s1.addr0() - s2.addr0();
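
Lines 271-284 compare two shadow words without unpacking them: xoring the raw values leaves no bits at or above kTidShift exactly when the thread ids (including the top bit) match, and leaves the low five bits clear exactly when addr0 and size_log match, since those occupy bits 0-4 (lines 313 and 371). A small sketch of both checks, assuming kTidBits = 13 so kTidShift = 50.

#include <cstdint>
#include <cstdio>

typedef uint64_t u64;
const int kTidShift = 64 - 13 - 1;  // assumes kTidBits = 13

bool TidsAreEqual(u64 x1, u64 x2) {
  u64 shifted_xor = (x1 ^ x2) >> kTidShift;  // line 271
  return shifted_xor == 0;
}

bool Addr0AndSizeAreEqual(u64 x1, u64 x2) {
  u64 masked_xor = (x1 ^ x2) & 31;           // line 277
  return masked_xor == 0;
}

int main() {
  u64 a = (7ull << kTidShift) | 0x14;  // tid 7, addr0/size bits 0x14
  u64 b = (7ull << kTidShift) | 0x14;  // same tid, same access bits
  u64 c = (9ull << kTidShift) | 0x03;  // different tid, different access bits
  printf("%d %d\n", TidsAreEqual(a, b), Addr0AndSizeAreEqual(a, c));
  return 0;
}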
313 u64 addr0() const { return x_ & 7; }
314 u64 size() const { return 1ull << size_log(); }
343 bool v = x_ & u64(((kIsWrite ^ 1) << kReadShift)
351 <= u64((kIsWrite ^ 1) | (kIsAtomic << 1));
359 >= u64((kIsWrite ^ 1) | (kIsAtomic << 1));
366 static const u64 kReadShift = 5;
367 static const u64 kReadBit = 1ull << kReadShift;
368 static const u64 kAtomicShift = 6;
369 static const u64 kAtomicBit = 1ull << kAtomicShift;
371 u64 size_log() const { return (x_ >> 3) & 3; }
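
Lines 313-371 give the low-bit layout of a shadow word: addr0 in bits 0-2, size_log in bits 3-4 (so size() is 1 << size_log()), the read bit at kReadShift = 5, and the atomic bit at kAtomicShift = 6. A minimal sketch of packing and unpacking those bits; the PackAccess helper is hypothetical and only illustrates the layout implied by the accessors.

#include <cstdint>
#include <cstdio>

typedef uint64_t u64;
const u64 kReadShift = 5;                   // line 366
const u64 kReadBit = 1ull << kReadShift;    // line 367
const u64 kAtomicShift = 6;                 // line 368
const u64 kAtomicBit = 1ull << kAtomicShift;  // line 369

// Hypothetical packer, for illustration only.
u64 PackAccess(u64 addr0, u64 size_log, bool is_read, bool is_atomic) {
  u64 x = (addr0 & 7) | ((size_log & 3) << 3);
  if (is_read) x |= kReadBit;
  if (is_atomic) x |= kAtomicBit;
  return x;
}

int main() {
  u64 x = PackAccess(/*addr0=*/4, /*size_log=*/2, /*is_read=*/true, false);
  u64 addr0 = x & 7;                    // line 313
  u64 size  = 1ull << ((x >> 3) & 3);   // lines 314, 371
  bool is_read   = x & kReadBit;
  bool is_atomic = x & kAtomicBit;
  printf("addr0=%llu size=%llu read=%d atomic=%d\n",
         (unsigned long long)addr0, (unsigned long long)size,
         is_read, is_atomic);
  return 0;
}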
399 u64 fast_synch_epoch;
405 u64 *racy_shadow_addr;
406 u64 racy_state[2];
421 u64 stat[StatCnt];
447 explicit ThreadState(Context *ctx, int tid, int unique_id, u64 epoch,
480 u64 epoch0;
481 u64 epoch1;
533 u64 stat[StatCnt];
534 u64 int_alloc_cnt[MBlockTypeCount];
535 u64 int_alloc_siz[MBlockTypeCount];
567 void AddMutex(u64 id);
573 void RestoreStack(int tid, const u64 epoch, StackTrace *stk, MutexSet *mset);
575 void StatAggregate(u64 *dst, u64 *src);
576 void StatOutput(u64 *stat);
577 void ALWAYS_INLINE INLINE StatInc(ThreadState *thr, StatType typ, u64 n = 1) {
581 void ALWAYS_INLINE INLINE StatSet(ThreadState *thr, StatType typ, u64 n) {
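
Lines 421, 533 and 575-581 suggest the statistics pattern: both ThreadState and the Context keep a u64 stat[StatCnt] array, StatInc/StatSet bump per-thread counters inline, and StatAggregate folds one counter array into another. A simplified sketch of that pattern; StatCnt, kCollectStats and the struct below are stand-ins, and gating on a compile-time flag is an assumption.

#include <cstdint>

typedef uint64_t u64;

const bool kCollectStats = true;  // assumed compile-time switch
enum StatType { StatMop, StatFuncEnter, StatCnt };

struct ThreadState {
  u64 stat[StatCnt];
};

// Increment a per-thread counter; compiles away when stats are disabled.
inline void StatInc(ThreadState *thr, StatType typ, u64 n = 1) {
  if (kCollectStats)
    thr->stat[typ] += n;
}

// Fold one counter array into another (e.g. thread stats into global stats).
void StatAggregate(u64 *dst, u64 *src) {
  if (!kCollectStats)
    return;
  for (int i = 0; i < StatCnt; i++)
    dst[i] += src[i];
}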
632 u64 *shadow_mem, Shadow cur);
725 EventType typ, u64 addr) {
730 u64 pos = fs.GetTracePos();
740 Event ev = (u64)addr | ((u64)typ << 61);
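
Lines 725-740 show how a trace event is encoded and where it goes: the event packs a 3-bit EventType into the top bits of a u64 and the PC or address into the low 61 bits, and the slot comes from FastState::GetTracePos(), which masks the epoch down to the trace size (lines 204-207). A minimal sketch of both steps; kTracePartSizeBits, the Event typedef and the history-size value hs are assumptions for illustration.

#include <cstdint>
#include <cstdio>

typedef uint64_t u64;
typedef u64 Event;  // assumed: an event is just a packed u64

enum EventType { EventTypeMop = 0, EventTypeFuncEnter, EventTypeFuncExit };

const int kTracePartSizeBits = 13;  // assumed; real value is in the rtl sources

int main() {
  u64 epoch = 123456;
  int hs = 2;  // history-size selector, as in 'hs' on line 207 (assumed value)
  const u64 mask = (1ull << (kTracePartSizeBits + hs + 1)) - 1;  // line 207
  u64 pos = epoch & mask;  // slot in the circular trace buffer

  u64 pc = 0x00007f0012345678ull;
  Event ev = (u64)pc | ((u64)EventTypeFuncEnter << 61);  // line 740

  u64 typ  = ev >> 61;                 // recover the 3-bit event type
  u64 addr = ev & ((1ull << 61) - 1);  // recover the 61-bit address/PC
  printf("pos=%llu typ=%llu addr=0x%llx\n",
         (unsigned long long)pos, (unsigned long long)typ,
         (unsigned long long)addr);
  return 0;
}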