Bug Summary

File: deps/jemalloc/include/jemalloc/internal/prof_inlines_b.h
Warning: line 196, column 15
Value stored to 'tctx' during its initialization is never read

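The message comes from the analyzer's dead-store check (part of the deadcode checker package enabled in the invocation below): on the analyzed paths, the value assigned to tctx in its initializer is never read before the variable is overwritten or goes out of scope. As a hedged illustration only, here is a minimal, hypothetical C translation unit (not jemalloc code; counter_peek and demo are made-up names) that triggers the same "Value stored to ... during its initialization is never read" report when run through `clang --analyze dead_store_demo.c`:

/*
 * dead_store_demo.c: hypothetical reproduction of the diagnostic pattern
 * reported above (not taken from jemalloc).
 */
#include <stdio.h>

static int counter_peek(void) {
    static int n;
    return ++n;
}

int demo(int flag) {
    int v = counter_peek(); /* flagged: the initializer's value is dead...  */
    if (!flag) {
        return 0;           /* ...this path never reads 'v'...              */
    }
    v = 42;                 /* ...and this one overwrites it before reading. */
    return v;
}

int main(void) {
    printf("%d\n", demo(1));
    return 0;
}

The side effect of the initializer still happens; only the stored value itself is unused, which is exactly what the checker objects to.
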
Annotated Source Code

clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name nstime.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model static -mthread-model posix -mframe-pointer=none -fmath-errno -fno-rounding-math -masm-verbose -mconstructor-aliases -munwind-tables -target-cpu x86-64 -dwarf-column-info -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib/llvm-10/lib/clang/10.0.0 -D _GNU_SOURCE -D _REENTRANT -I include -I include -D JEMALLOC_NO_PRIVATE_NAMESPACE -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-10/lib/clang/10.0.0/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O3 -std=gnu99 -fdebug-compilation-dir /home/netto/Desktop/redis-6.2.1/deps/jemalloc -ferror-limit 19 -fmessage-length 0 -funroll-loops -fgnuc-version=4.2.1 -fobjc-runtime=gcc -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -o /tmp/scan-build-2021-03-14-133648-8817-1 -x c src/nstime.c
  1 #ifndef JEMALLOC_INTERNAL_PROF_INLINES_B_H
  2 #define JEMALLOC_INTERNAL_PROF_INLINES_B_H
  3
  4 #include "jemalloc/internal/sz.h"
  5
  6 JEMALLOC_ALWAYS_INLINE bool
  7 prof_gdump_get_unlocked(void) {
  8     /*
  9      * No locking is used when reading prof_gdump_val in the fast path, so
 10      * there are no guarantees regarding how long it will take for all
 11      * threads to notice state changes.
 12      */
 13     return prof_gdump_val;
 14 }
 15
 16 JEMALLOC_ALWAYS_INLINE prof_tdata_t *
 17 prof_tdata_get(tsd_t *tsd, bool create) {
 18     prof_tdata_t *tdata;
 19
 20     cassert(config_prof);
 21
 22     tdata = tsd_prof_tdata_get(tsd);
 23     if (create) {
 24         if (unlikely(tdata == NULL)) {
 25             if (tsd_nominal(tsd)) {
 26                 tdata = prof_tdata_init(tsd);
 27                 tsd_prof_tdata_set(tsd, tdata);
 28             }
 29         } else if (unlikely(tdata->expired)) {
 30             tdata = prof_tdata_reinit(tsd, tdata);
 31             tsd_prof_tdata_set(tsd, tdata);
 32         }
 33         assert(tdata == NULL || tdata->attached);
 34     }
 35
 36     return tdata;
 37 }
 38
 39 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
 40 prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
 41     cassert(config_prof);
 42     assert(ptr != NULL);
 43
 44     return arena_prof_tctx_get(tsdn, ptr, alloc_ctx);
 45 }
 46
 47 JEMALLOC_ALWAYS_INLINE void
 48 prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
 49     alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
 50     cassert(config_prof);
 51     assert(ptr != NULL);
 52
 53     arena_prof_tctx_set(tsdn, ptr, usize, alloc_ctx, tctx);
 54 }
 55
 56 JEMALLOC_ALWAYS_INLINE void
 57 prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
 58     cassert(config_prof);
 59     assert(ptr != NULL);
 60
 61     arena_prof_tctx_reset(tsdn, ptr, tctx);
 62 }
 63
 64 JEMALLOC_ALWAYS_INLINE bool
 65 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
 66     prof_tdata_t **tdata_out) {
 67     prof_tdata_t *tdata;
 68
 69     cassert(config_prof);
 70
 71     tdata = prof_tdata_get(tsd, true);
 72     if (unlikely((uintptr_t)tdata <= (uintptr_t)PROF_TDATA_STATE_MAX)) {
 73         tdata = NULL;
 74     }
 75
 76     if (tdata_out != NULL) {
 77         *tdata_out = tdata;
 78     }
 79
 80     if (unlikely(tdata == NULL)) {
 81         return true;
 82     }
 83
 84     if (likely(tdata->bytes_until_sample >= usize)) {
 85         if (update) {
 86             tdata->bytes_until_sample -= usize;
 87         }
 88         return true;
 89     } else {
 90         if (tsd_reentrancy_level_get(tsd) > 0) {
 91             return true;
 92         }
 93         /* Compute new sample threshold. */
 94         if (update) {
 95             prof_sample_threshold_update(tdata);
 96         }
 97         return !tdata->active;
 98     }
 99 }
100
101 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
102 prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
103     prof_tctx_t *ret;
104     prof_tdata_t *tdata;
105     prof_bt_t bt;
106
107     assert(usize == sz_s2u(usize));
108
109     if (!prof_active || likely(prof_sample_accum_update(tsd, usize, update,
110         &tdata))) {
111         ret = (prof_tctx_t *)(uintptr_t)1U;
112     } else {
113         bt_init(&bt, tdata->vec);
114         prof_backtrace(&bt);
115         ret = prof_lookup(tsd, &bt);
116     }
117
118     return ret;
119 }
120
121 JEMALLOC_ALWAYS_INLINE void
122 prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx,
123     prof_tctx_t *tctx) {
124     cassert(config_prof);
125     assert(ptr != NULL);
126     assert(usize == isalloc(tsdn, ptr));
127
128     if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
129         prof_malloc_sample_object(tsdn, ptr, usize, tctx);
130     } else {
131         prof_tctx_set(tsdn, ptr, usize, alloc_ctx,
132             (prof_tctx_t *)(uintptr_t)1U);
133     }
134 }
135
136 JEMALLOC_ALWAYS_INLINE void
137 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
138     bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
139     prof_tctx_t *old_tctx) {
140     bool sampled, old_sampled, moved;
141
142     cassert(config_prof);
143     assert(ptr != NULL || (uintptr_t)tctx <= (uintptr_t)1U);
144
145     if (prof_active && !updated && ptr != NULL) {
146         assert(usize == isalloc(tsd_tsdn(tsd), ptr));
147         if (prof_sample_accum_update(tsd, usize, true, NULL)) {
148             /*
149              * Don't sample. The usize passed to prof_alloc_prep()
150              * was larger than what actually got allocated, so a
151              * backtrace was captured for this allocation, even
152              * though its actual usize was insufficient to cross the
153              * sample threshold.
154              */
155             prof_alloc_rollback(tsd, tctx, true);
156             tctx = (prof_tctx_t *)(uintptr_t)1U;
157         }
158     }
159
160     sampled = ((uintptr_t)tctx > (uintptr_t)1U);
161     old_sampled = ((uintptr_t)old_tctx > (uintptr_t)1U);
162     moved = (ptr != old_ptr);
163
164     if (unlikely(sampled)) {
165         prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);
166     } else if (moved) {
167         prof_tctx_set(tsd_tsdn(tsd), ptr, usize, NULL,
168             (prof_tctx_t *)(uintptr_t)1U);
169     } else if (unlikely(old_sampled)) {
170         /*
171          * prof_tctx_set() would work for the !moved case as well, but
172          * prof_tctx_reset() is slightly cheaper, and the proper thing
173          * to do here in the presence of explicit knowledge re: moved
174          * state.
175          */
176         prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx);
177     } else {
178         assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr, NULL) ==
179             (uintptr_t)1U);
180     }
181
182     /*
183      * The prof_free_sampled_object() call must come after the
184      * prof_malloc_sample_object() call, because tctx and old_tctx may be
185      * the same, in which case reversing the call order could cause the tctx
186      * to be prematurely destroyed as a side effect of momentarily zeroed
187      * counters.
188      */
189     if (unlikely(old_sampled)) {
190         prof_free_sampled_object(tsd, old_usize, old_tctx);
191     }
192 }
193
194 JEMALLOC_ALWAYS_INLINE void
195 prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) {
196     prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);
        Value stored to 'tctx' during its initialization is never read
197
198     cassert(config_prof);
199     assert(usize == isalloc(tsd_tsdn(tsd), ptr));
200
201     if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
202         prof_free_sampled_object(tsd, usize, tctx);
203     }
204 }
205
206 #endif /* JEMALLOC_INTERNAL_PROF_INLINES_B_H */
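One possible way to address the report, sketched below as an assumption rather than as the upstream jemalloc fix, is to defer the assignment so that the store to tctx happens only after the cassert()/assert() checks, on the path that goes on to read it in the unlikely() test. The behavior of prof_free() is unchanged; whether this actually clears the diagnostic should be confirmed by re-running scan-build.

/* Hypothetical rearrangement of prof_free() (not an upstream patch). */
JEMALLOC_ALWAYS_INLINE void
prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) {
    prof_tctx_t *tctx;

    cassert(config_prof);
    assert(usize == isalloc(tsd_tsdn(tsd), ptr));

    /* Assign after the assertions so the stored value is read on every
     * path that performs the store. */
    tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);
    if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
        prof_free_sampled_object(tsd, usize, tctx);
    }
}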