@@ -90,7 +90,7 @@ struct _PyTraceMalloc_Config _Py_tracemalloc_config = _PyTraceMalloc_Config_INIT
 
 
 static void *
-_PyMem_RawMalloc(void *ctx, size_t size)
+_PyMem_RawMalloc(void *Py_UNUSED(ctx), size_t size)
 {
     /* PyMem_RawMalloc(0) means malloc(1). Some systems would return NULL
        for malloc(0), which would be treated as an error. Some platforms would
@@ -102,7 +102,7 @@ _PyMem_RawMalloc(void *ctx, size_t size)
 }
 
 static void *
-_PyMem_RawCalloc(void *ctx, size_t nelem, size_t elsize)
+_PyMem_RawCalloc(void *Py_UNUSED(ctx), size_t nelem, size_t elsize)
 {
     /* PyMem_RawCalloc(0, 0) means calloc(1, 1). Some systems would return NULL
        for calloc(0, 0), which would be treated as an error. Some platforms
@@ -116,37 +116,38 @@ _PyMem_RawCalloc(void *ctx, size_t nelem, size_t elsize)
 }
 
 static void *
-_PyMem_RawRealloc(void *ctx, void *ptr, size_t size)
+_PyMem_RawRealloc(void *Py_UNUSED(ctx), void *ptr, size_t size)
 {
     if (size == 0)
         size = 1;
     return realloc(ptr, size);
 }
 
 static void
-_PyMem_RawFree(void *ctx, void *ptr)
+_PyMem_RawFree(void *Py_UNUSED(ctx), void *ptr)
 {
     free(ptr);
 }
 
 
 #ifdef MS_WINDOWS
 static void *
-_PyObject_ArenaVirtualAlloc(void *ctx, size_t size)
+_PyObject_ArenaVirtualAlloc(void *Py_UNUSED(ctx), size_t size)
 {
     return VirtualAlloc(NULL, size,
                         MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
 }
 
 static void
-_PyObject_ArenaVirtualFree(void *ctx, void *ptr, size_t size)
+_PyObject_ArenaVirtualFree(void *Py_UNUSED(ctx), void *ptr,
+                           size_t Py_UNUSED(size))
 {
     VirtualFree(ptr, 0, MEM_RELEASE);
 }
 
 #elif defined(ARENAS_USE_MMAP)
 static void *
-_PyObject_ArenaMmap(void *ctx, size_t size)
+_PyObject_ArenaMmap(void *Py_UNUSED(ctx), size_t size)
 {
     void *ptr;
     ptr = mmap(NULL, size, PROT_READ|PROT_WRITE,
@@ -158,20 +159,20 @@ _PyObject_ArenaMmap(void *ctx, size_t size)
 }
 
 static void
-_PyObject_ArenaMunmap(void *ctx, void *ptr, size_t size)
+_PyObject_ArenaMunmap(void *Py_UNUSED(ctx), void *ptr, size_t size)
 {
     munmap(ptr, size);
 }
 
 #else
 static void *
-_PyObject_ArenaMalloc(void *ctx, size_t size)
+_PyObject_ArenaMalloc(void *Py_UNUSED(ctx), size_t size)
 {
     return malloc(size);
 }
 
 static void
-_PyObject_ArenaFree(void *ctx, void *ptr, size_t size)
+_PyObject_ArenaFree(void *Py_UNUSED(ctx), void *ptr, size_t Py_UNUSED(size))
 {
     free(ptr);
 }
@@ -1684,7 +1685,7 @@ new_arena(void)
    pymalloc. When the radix tree is used, 'poolp' is unused.
 */
 static bool
-address_in_range(void *p, poolp pool)
+address_in_range(void *p, poolp Py_UNUSED(pool))
 {
     return arena_map_is_used(p);
 }
@@ -1945,7 +1946,7 @@ allocate_from_new_pool(uint size)
    or when the max memory limit has been reached.
 */
 static inline void *
-pymalloc_alloc(void *ctx, size_t nbytes)
+pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
 {
 #ifdef WITH_VALGRIND
     if (UNLIKELY(running_on_valgrind == -1)) {
@@ -2215,7 +2216,7 @@ insert_to_freepool(poolp pool)
    Return 1 if it was freed.
    Return 0 if the block was not allocated by pymalloc_alloc(). */
 static inline int
-pymalloc_free(void *ctx, void *p)
+pymalloc_free(void *Py_UNUSED(ctx), void *p)
 {
     assert(p != NULL);
 
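Note on the pattern: every hunk above keeps the unused ctx (and, where applicable, size) parameter in the prototype, because these functions are installed as function pointers in CPython's allocator tables and must all share the same signature; Py_UNUSED() (from Include/pyport.h) only tells the compiler the parameter is intentionally unused so -Wunused-parameter builds stay clean. The following is a minimal sketch of the idea, not CPython's actual macro: SKETCH_UNUSED and sketch_raw_malloc are hypothetical names, and the GCC/Clang branch is an assumption about how such a macro is typically written.

/* Sketch only: mirrors the intent of Py_UNUSED(), not its exact definition.
 * On GCC/Clang the parameter is renamed and tagged __attribute__((unused));
 * elsewhere it is merely renamed, which also stops the body from using it
 * by accident. */
#include <stdlib.h>

#if defined(__GNUC__) || defined(__clang__)
#  define SKETCH_UNUSED(name) _unused_ ## name __attribute__((unused))
#else
#  define SKETCH_UNUSED(name) _unused_ ## name
#endif

/* Hypothetical allocator hook: the (ctx, size) prototype is kept so the
 * function fits a PyMemAllocatorEx-style table of function pointers, even
 * though this particular backend never reads ctx. */
static void *
sketch_raw_malloc(void *SKETCH_UNUSED(ctx), size_t size)
{
    /* malloc(0) may legally return NULL; request at least one byte. */
    return malloc(size ? size : 1);
}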