Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit f13a912

Browse files
committed
Fix issues in e8fdbd5.
When the 64bit atomics simulation is in use, we can't necessarily guarantee the correct alignment of the atomics due to lack of compiler support for doing so — that's fine from a safety perspective, because everything is protected by a lock, but we asserted the alignment in all cases. Weaken them. Per complaint from Alvaro Herrera. My #ifdefery for PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY wasn't sufficient. Fix that. Per complaint from Alexander Korotkov.
1 parent 453aaf7 commit f13a912

File tree

2 files changed

+62
-16
lines changed

2 files changed

+62
-16
lines changed

‎src/include/port/atomics.h

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -425,82 +425,107 @@ pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
425425
staticinlinevoid
426426
pg_atomic_init_u64(volatilepg_atomic_uint64*ptr,uint64val)
427427
{
428+
/*
429+
* Can't necessarily enforce alignment - and don't need it - when using
430+
* the spinlock based fallback implementation. Therefore only assert when
431+
* not using it.
432+
*/
433+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
428434
AssertPointerAlignment(ptr,8);
429-
435+
#endif
430436
pg_atomic_init_u64_impl(ptr,val);
431437
}
432438

433439
staticinlineuint64
434440
pg_atomic_read_u64(volatilepg_atomic_uint64*ptr)
435441
{
442+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
436443
AssertPointerAlignment(ptr,8);
444+
#endif
437445
returnpg_atomic_read_u64_impl(ptr);
438446
}
439447

440448
staticinlinevoid
441449
pg_atomic_write_u64(volatilepg_atomic_uint64*ptr,uint64val)
442450
{
451+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
443452
AssertPointerAlignment(ptr,8);
453+
#endif
444454
pg_atomic_write_u64_impl(ptr,val);
445455
}
446456

447457
staticinlineuint64
448458
pg_atomic_exchange_u64(volatilepg_atomic_uint64*ptr,uint64newval)
449459
{
460+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
450461
AssertPointerAlignment(ptr,8);
451-
462+
#endif
452463
returnpg_atomic_exchange_u64_impl(ptr,newval);
453464
}
454465

455466
staticinlinebool
456467
pg_atomic_compare_exchange_u64(volatilepg_atomic_uint64*ptr,
457468
uint64*expected,uint64newval)
458469
{
470+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
459471
AssertPointerAlignment(ptr,8);
460472
AssertPointerAlignment(expected,8);
473+
#endif
461474
returnpg_atomic_compare_exchange_u64_impl(ptr,expected,newval);
462475
}
463476

464477
staticinlineuint64
465478
pg_atomic_fetch_add_u64(volatilepg_atomic_uint64*ptr,int64add_)
466479
{
480+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
467481
AssertPointerAlignment(ptr,8);
482+
#endif
468483
returnpg_atomic_fetch_add_u64_impl(ptr,add_);
469484
}
470485

471486
staticinlineuint64
472487
pg_atomic_fetch_sub_u64(volatilepg_atomic_uint64*ptr,int64sub_)
473488
{
489+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
474490
AssertPointerAlignment(ptr,8);
491+
#endif
475492
Assert(sub_!=PG_INT64_MIN);
476493
returnpg_atomic_fetch_sub_u64_impl(ptr,sub_);
477494
}
478495

479496
staticinlineuint64
480497
pg_atomic_fetch_and_u64(volatilepg_atomic_uint64*ptr,uint64and_)
481498
{
499+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
482500
AssertPointerAlignment(ptr,8);
501+
#endif
483502
returnpg_atomic_fetch_and_u64_impl(ptr,and_);
484503
}
485504

486505
staticinlineuint64
487506
pg_atomic_fetch_or_u64(volatilepg_atomic_uint64*ptr,uint64or_)
488507
{
508+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
489509
AssertPointerAlignment(ptr,8);
510+
#endif
490511
returnpg_atomic_fetch_or_u64_impl(ptr,or_);
491512
}
492513

493514
staticinlineuint64
494515
pg_atomic_add_fetch_u64(volatilepg_atomic_uint64*ptr,int64add_)
495516
{
517+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
496518
AssertPointerAlignment(ptr,8);
519+
#endif
497520
returnpg_atomic_add_fetch_u64_impl(ptr,add_);
498521
}
499522

500523
staticinlineuint64
501524
pg_atomic_sub_fetch_u64(volatilepg_atomic_uint64*ptr,int64sub_)
502525
{
526+
#ifndefPG_HAVE_ATOMIC_U64_SIMULATION
503527
AssertPointerAlignment(ptr,8);
528+
#endif
504529
Assert(sub_!=PG_INT64_MIN);
505530
returnpg_atomic_sub_fetch_u64_impl(ptr,sub_);
506531
}

‎src/include/port/atomics/generic.h

Lines changed: 35 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -271,26 +271,26 @@ pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
271271
}
272272
#endif
273273

274-
#ifndefPG_HAVE_ATOMIC_READ_U64
275-
#definePG_HAVE_ATOMIC_READ_U64
276-
staticinlineuint64
277-
pg_atomic_read_u64_impl(volatilepg_atomic_uint64*ptr)
278-
{
279-
return*(&ptr->value);
280-
}
281-
#endif
282-
283274
#ifndefPG_HAVE_ATOMIC_WRITE_U64
284275
#definePG_HAVE_ATOMIC_WRITE_U64
276+
277+
#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY)&& \
278+
!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
279+
285280
staticinlinevoid
286281
pg_atomic_write_u64_impl(volatilepg_atomic_uint64*ptr,uint64val)
287282
{
283+
/*
284+
* On this platform aligned 64bit writes are guaranteed to be atomic,
285+
* except if using the fallback implementation, where can't guarantee the
286+
* required alignment.
287+
*/
288+
AssertPointerAlignment(ptr,8);
288289
ptr->value=val;
289290
}
290-
#endif
291291

292-
#ifndefPG_HAVE_ATOMIC_WRITE_U64
293-
#definePG_HAVE_ATOMIC_WRITE_U64
292+
#else
293+
294294
staticinlinevoid
295295
pg_atomic_write_u64_impl(volatilepg_atomic_uint64*ptr,uint64val)
296296
{
@@ -300,10 +300,30 @@ pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
300300
*/
301301
pg_atomic_exchange_u64_impl(ptr,val);
302302
}
303-
#endif
303+
304+
#endif/* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
305+
#endif/* PG_HAVE_ATOMIC_WRITE_U64 */
304306

305307
#ifndefPG_HAVE_ATOMIC_READ_U64
306308
#definePG_HAVE_ATOMIC_READ_U64
309+
310+
#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY)&& \
311+
!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
312+
313+
staticinlineuint64
314+
pg_atomic_read_u64_impl(volatilepg_atomic_uint64*ptr)
315+
{
316+
/*
317+
* On this platform aligned 64bit reads are guaranteed to be atomic,
318+
* except if using the fallback implementation, where can't guarantee the
319+
* required alignment.
320+
*/
321+
AssertPointerAlignment(ptr,8);
322+
return*(&ptr->value);
323+
}
324+
325+
#else
326+
307327
staticinlineuint64
308328
pg_atomic_read_u64_impl(volatilepg_atomic_uint64*ptr)
309329
{
@@ -319,7 +339,8 @@ pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
319339

320340
returnold;
321341
}
322-
#endif
342+
#endif/* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
343+
#endif/* PG_HAVE_ATOMIC_READ_U64 */
323344

324345
#ifndefPG_HAVE_ATOMIC_INIT_U64
325346
#definePG_HAVE_ATOMIC_INIT_U64

0 commit comments

Comments
 (0)

[8]ページ先頭

©2009-2025 Movatter.jp