Snap for 10447354 from 57465554fc54b04bc9442fdd73a1e8fd6b50dc74 to mainline-wifi-release

Change-Id: I1eebae426c3d28b970dbec8b3e9754be4ff3bd07
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index f070bea..8f875ed 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,6 +1,6 @@
 {
   "git": {
-    "sha1": "44852cc72dbfbf57c5477a907ec0ab36527bc36b"
+    "sha1": "35148638c54c6233545c65d1a5e09d5ba0661806"
   },
   "path_in_vcs": ""
 }
\ No newline at end of file
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 7967bb9..6a998a8 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -21,7 +21,8 @@
     - uses: actions/checkout@v2
       with:
         fetch-depth: 0 # fetch tags for publish
-
+    - uses: Swatinem/rust-cache@359a70e43a0bb8a13953b04a90f76428b4959bb6
     - run: cargo run -p xtask -- ci
       env:
-        CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
+        CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
+        MIRIFLAGS: -Zmiri-strict-provenance
diff --git a/Android.bp b/Android.bp
index 7211fe6..e9b9198 100644
--- a/Android.bp
+++ b/Android.bp
@@ -42,9 +42,9 @@
     host_supported: true,
     crate_name: "once_cell",
     cargo_env_compat: true,
-    cargo_pkg_version: "1.9.0",
+    cargo_pkg_version: "1.17.1",
     srcs: ["src/lib.rs"],
-    edition: "2018",
+    edition: "2021",
     features: [
         "alloc",
         "default",
@@ -53,10 +53,30 @@
     ],
     apex_available: [
         "//apex_available:platform",
-        "com.android.compos",
-        "com.android.resolv",
-        "com.android.virt",
+        "//apex_available:anyapex",
     ],
+    product_available: true,
+    vendor_available: true,
+    min_sdk_version: "29",
+}
+
+rust_library_rlib {
+    name: "libonce_cell_nostd",
+    crate_name: "once_cell",
+    cargo_env_compat: true,
+    cargo_pkg_version: "1.16.0",
+    srcs: ["src/lib.rs"],
+    edition: "2021",
+    features: [
+        "alloc",
+        "default",
+        "race",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex"
+    ],
+    vendor_available: true,
     min_sdk_version: "29",
 }
 
@@ -65,14 +85,14 @@
     host_supported: true,
     crate_name: "once_cell",
     cargo_env_compat: true,
-    cargo_pkg_version: "1.9.0",
+    cargo_pkg_version: "1.17.1",
     srcs: ["src/lib.rs"],
     test_suites: ["general-tests"],
     auto_gen_config: true,
     test_options: {
         unit_test: true,
     },
-    edition: "2018",
+    edition: "2021",
     features: [
         "alloc",
         "default",
@@ -91,14 +111,14 @@
     host_supported: true,
     crate_name: "it",
     cargo_env_compat: true,
-    cargo_pkg_version: "1.9.0",
+    cargo_pkg_version: "1.17.1",
     srcs: ["tests/it.rs"],
     test_suites: ["general-tests"],
     auto_gen_config: true,
     test_options: {
         unit_test: true,
     },
-    edition: "2018",
+    edition: "2021",
     features: [
         "alloc",
         "default",
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2c95053..bf489b2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,63 @@
 # Changelog
 
-## 1.9
+## Unreleased
+
+-
+
+## 1.17.1
+
+- Make `OnceRef` implementation compliant with [strict provenance](https://github.com/rust-lang/rust/issues/95228).
+
+## 1.17.0
+
+- Add `race::OnceRef` for storing a `&'a T`.
+
+## 1.16.0
+
+- Add `no_std` implementation based on `critical-section`,
+  [#195](https://github.com/matklad/once_cell/pull/195).
+- Deprecate the `atomic-polyfill` feature (use the new `critical-section` feature instead).
+
+## 1.15.0
+
+- Increase the minimum supported Rust version to 1.56.0.
+- Implement `UnwindSafe` even if the `std` feature is disabled.
+
+## 1.14.0
+
+- Add extension to `unsync` and `sync` `Lazy` mut API:
+  - `force_mut`
+  - `get_mut`
+
+
+## 1.13.1
+
+- Make implementation compliant with [strict provenance](https://github.com/rust-lang/rust/issues/95228).
+- Upgrade `atomic-polyfill` to `1.0`
+
+## 1.13.0
+
+- Add `Lazy::get`, similar to `OnceCell::get`.
+
+## 1.12.1
+
+- Remove incorrect `debug_assert`.
+
+## 1.12.0
+
+- Add `OnceCell::wait`, a blocking variant of `get`.
+
+## 1.11.0
+
+- Add `OnceCell::with_value` to create initialized `OnceCell` in `const` context.
+- Improve `Clone` implementation for `OnceCell`.
+- Rewrite `parking_lot` version on top of `parking_lot_core`, for even smaller cells!
+
+## 1.10.0
+
+- Upgrade `parking_lot` to `0.12.0` (note that this bumps the MSRV with the `parking_lot` feature enabled to `1.49.0`).
+
+## 1.9.0
 
 - Added an `atomic-polyfill` optional dependency to compile `race` on platforms without atomics
 
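The changelog entries above introduce several new public APIs (`OnceCell::with_value`, `OnceCell::wait`, `Lazy::get`, and the `force_mut`/`get_mut` extensions). As a rough usage sketch only -- not part of this change, and assuming the default `std` feature -- they fit together like this:

```rust
// Illustrative sketch based on the doc examples added later in this diff.
use once_cell::sync::{Lazy, OnceCell};

fn main() {
    // 1.11.0: create an already-initialized cell in a const context.
    static CONFIG: OnceCell<u32> = OnceCell::with_value(92);
    assert_eq!(CONFIG.get(), Some(&92));

    // 1.12.0: `wait` blocks until another thread calls `set`.
    static CELL: OnceCell<u32> = OnceCell::new();
    let t = std::thread::spawn(|| CELL.set(1).unwrap());
    assert_eq!(*CELL.wait(), 1);
    t.join().unwrap();

    // 1.13.0 / 1.14.0: `Lazy::get` plus the `*_mut` extensions.
    let mut lazy = Lazy::new(|| 92);
    assert_eq!(Lazy::get(&lazy), None); // not forced yet
    *Lazy::force_mut(&mut lazy) += 1;   // force and mutate in place
    assert_eq!(Lazy::get_mut(&mut lazy), Some(&mut 93));
}
```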
diff --git a/Cargo.toml b/Cargo.toml
index 0a94bfc..76a6291 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,18 +10,31 @@
 # See Cargo.toml.orig for the original contents.
 
 [package]
-edition = "2018"
+edition = "2021"
+rust-version = "1.56"
 name = "once_cell"
-version = "1.9.0"
+version = "1.17.1"
 authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
-exclude = ["*.png", "*.svg", "/Cargo.lock.msrv", "/.travis.yml", "/run-miri-tests.sh", "rustfmt.toml"]
+exclude = [
+    "*.png",
+    "*.svg",
+    "/Cargo.lock.msrv",
+    "rustfmt.toml",
+]
 description = "Single assignment cells and lazy values."
 documentation = "https://docs.rs/once_cell"
 readme = "README.md"
-keywords = ["lazy", "static"]
-categories = ["rust-patterns", "memory-management"]
+keywords = [
+    "lazy",
+    "static",
+]
+categories = [
+    "rust-patterns",
+    "memory-management",
+]
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/matklad/once_cell"
+
 [package.metadata.docs.rs]
 all-features = true
 
@@ -52,16 +65,29 @@
 [[example]]
 name = "test_synchronization"
 required-features = ["std"]
-[dependencies.atomic-polyfill]
-version = "0.1"
-optional = true
 
-[dependencies.parking_lot]
-version = "0.11"
+[dependencies.atomic_polyfill]
+version = "1"
+optional = true
+package = "atomic-polyfill"
+
+[dependencies.critical_section]
+version = "1"
+optional = true
+package = "critical-section"
+
+[dependencies.parking_lot_core]
+version = "0.9.3"
 optional = true
 default_features = false
+
+[dev-dependencies.critical_section]
+version = "1.1.1"
+features = ["std"]
+package = "critical-section"
+
 [dev-dependencies.crossbeam-utils]
-version = "0.7.2"
+version = "0.8.7"
 
 [dev-dependencies.lazy_static]
 version = "1.0.0"
@@ -71,7 +97,13 @@
 
 [features]
 alloc = ["race"]
+atomic-polyfill = ["critical-section"]
+critical-section = [
+    "critical_section",
+    "atomic_polyfill",
+]
 default = ["std"]
+parking_lot = ["parking_lot_core"]
 race = []
 std = ["alloc"]
 unstable = []
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index 3b2d1aa..ad02c34 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,9 +1,10 @@
 [package]
 name = "once_cell"
-version = "1.9.0"
+version = "1.17.1"
 authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
 license = "MIT OR Apache-2.0"
-edition = "2018"
+edition = "2021"
+rust-version = "1.56"
 
 description = "Single assignment cells and lazy values."
 readme = "README.md"
@@ -13,41 +14,53 @@
 keywords = ["lazy", "static"]
 categories = ["rust-patterns", "memory-management"]
 
-exclude = ["*.png", "*.svg", "/Cargo.lock.msrv", "/.travis.yml", "/run-miri-tests.sh", "rustfmt.toml"]
+exclude = ["*.png", "*.svg", "/Cargo.lock.msrv", "rustfmt.toml"]
 
 [workspace]
 members = ["xtask"]
 
 [dependencies]
-# Uses parking_lot to implement once_cell::sync::OnceCell.
-# This makes not speed difference, but makes each OnceCell<T>
-# for up to 16 bytes smaller, depending on the size of the T.
-parking_lot = { version = "0.11", optional = true, default_features = false }
-
-# To be used in order to enable the race feature on targets
-# that do not have atomics
-# *Warning:* This can be unsound. Please read the README of
-# [atomic-polyfill](https://github.com/embassy-rs/atomic-polyfill)
-# and make sure you understand all the implications
-atomic-polyfill = { version = "0.1", optional = true }
+# These optional dependencies are considered private impl details,
+# only features from `[features]` table are a part of semver-guarded API.
+parking_lot_core = { version = "0.9.3", optional = true, default_features = false }
+atomic_polyfill = { package = "atomic-polyfill", version = "1", optional = true }
+critical_section = { package = "critical-section", version = "1", optional = true }
 
 [dev-dependencies]
 lazy_static = "1.0.0"
-crossbeam-utils = "0.7.2"
+crossbeam-utils = "0.8.7"
 regex =  "1.2.0"
+critical_section = { package = "critical-section", version = "1.1.1", features = ["std"] }
 
 [features]
 default = ["std"]
+
 # Enables `once_cell::sync` module.
 std = ["alloc"]
+
 # Enables `once_cell::race::OnceBox` type.
 alloc = ["race"]
+
 # Enables `once_cell::race` module.
 race = []
+
+# Uses parking_lot to implement once_cell::sync::OnceCell.
+# This makes no speed difference, but makes each OnceCell<T>
+# up to 16 bytes smaller, depending on the size of the T.
+parking_lot = ["parking_lot_core"]
+
+# Uses `critical-section` to implement `sync` and `race` modules in
+# `#![no_std]` mode. Please read `critical-section` docs carefully
+# before enabling this feature.
+critical-section = ["critical_section", "atomic_polyfill" ]
+
 # Enables semver-exempt APIs of this crate.
 # At the moment, this feature is unused.
 unstable = []
 
+# Only for backwards compatibility.
+atomic-polyfill = ["critical-section"]
+
 [[example]]
 name = "bench"
 required-features = ["std"]
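The feature comments above define the layering: `race` is the lock-free, first-writer-wins core, `alloc` adds `race::OnceBox`, `std` adds the `sync` module, and `parking_lot`/`critical-section` swap the synchronization backend. A hedged downstream sketch (not part of this change; default features assumed, which enable `std`, `alloc` and `race`):

```rust
// Illustrative use of the `race` module gated by the features described above.
use core::num::NonZeroUsize;
use once_cell::race::{OnceBox, OnceNonZeroUsize};

// `alloc` enables `OnceBox`; `race` alone is enough for `OnceNonZeroUsize`.
static TABLE: OnceBox<Vec<u32>> = OnceBox::new();
static ID: OnceNonZeroUsize = OnceNonZeroUsize::new();

fn main() {
    // If two threads race here, one allocation wins and the loser is dropped.
    let table = TABLE.get_or_init(|| Box::new(vec![1, 2, 3]));
    assert_eq!(table.len(), 3);

    let id = ID.get_or_init(|| NonZeroUsize::new(42).unwrap());
    assert_eq!(id.get(), 42);
}
```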
diff --git a/METADATA b/METADATA
index 4b16d85..79a823c 100644
--- a/METADATA
+++ b/METADATA
@@ -1,3 +1,7 @@
+# This project was upgraded with external_updater.
+# Usage: tools/external_updater/updater.sh update rust/crates/once_cell
+# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+
 name: "once_cell"
 description: "Single assignment cells and lazy values."
 third_party {
@@ -7,13 +11,13 @@
   }
   url {
     type: ARCHIVE
-    value: "https://static.crates.io/crates/once_cell/once_cell-1.9.0.crate"
+    value: "https://static.crates.io/crates/once_cell/once_cell-1.17.1.crate"
   }
-  version: "1.9.0"
+  version: "1.17.1"
   license_type: NOTICE
   last_upgrade_date {
-    year: 2022
-    month: 3
-    day: 1
+    year: 2023
+    month: 2
+    day: 16
   }
 }
diff --git a/README.md b/README.md
index b5c9896..737f2de 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 <p align="center"><img src="design/logo.png" alt="once_cell"></p>
 
 
-[![Build Status](https://travis-ci.org/matklad/once_cell.svg?branch=master)](https://travis-ci.org/matklad/once_cell)
+[![Build Status](https://github.com/matklad/once_cell/actions/workflows/ci.yaml/badge.svg)](https://github.com/matklad/once_cell/actions)
 [![Crates.io](https://img.shields.io/crates/v/once_cell.svg)](https://crates.io/crates/once_cell)
 [![API reference](https://docs.rs/once_cell/badge.svg)](https://docs.rs/once_cell/)
 
@@ -51,6 +51,8 @@
 * [lazycell](https://crates.io/crates/lazycell)
 * [mitochondria](https://crates.io/crates/mitochondria)
 * [lazy_static](https://crates.io/crates/lazy_static)
+* [async_once_cell](https://crates.io/crates/async_once_cell)
+* [generic_once_cell](https://crates.io/crates/generic_once_cell) (bring your own mutex)
 
 The API of `once_cell` is being proposed for inclusion in
 [`std`](https://github.com/rust-lang/rfcs/pull/2788).
diff --git a/TEST_MAPPING b/TEST_MAPPING
index febfdce..153b0f1 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -2,6 +2,24 @@
 {
   "imports": [
     {
+      "path": "external/rust/crates/android_logger"
+    },
+    {
+      "path": "external/rust/crates/argh"
+    },
+    {
+      "path": "external/rust/crates/base64"
+    },
+    {
+      "path": "external/rust/crates/hashbrown"
+    },
+    {
+      "path": "external/rust/crates/hashlink"
+    },
+    {
+      "path": "external/rust/crates/libsqlite3-sys"
+    },
+    {
       "path": "external/rust/crates/quiche"
     },
     {
@@ -11,89 +29,77 @@
       "path": "external/rust/crates/thread_local"
     },
     {
+      "path": "external/rust/crates/tinytemplate"
+    },
+    {
+      "path": "external/rust/crates/tinyvec"
+    },
+    {
+      "path": "external/rust/crates/unicode-xid"
+    },
+    {
       "path": "external/rust/crates/webpki"
+    },
+    {
+      "path": "packages/modules/DnsResolver"
+    },
+    {
+      "path": "packages/modules/Virtualization/apkdmverity"
+    },
+    {
+      "path": "packages/modules/Virtualization/authfs"
+    },
+    {
+      "path": "packages/modules/Virtualization/avmd"
+    },
+    {
+      "path": "packages/modules/Virtualization/encryptedstore"
+    },
+    {
+      "path": "packages/modules/Virtualization/libs/apkverify"
+    },
+    {
+      "path": "packages/modules/Virtualization/microdroid_manager"
+    },
+    {
+      "path": "packages/modules/Virtualization/virtualizationmanager"
+    },
+    {
+      "path": "packages/modules/Virtualization/vm"
+    },
+    {
+      "path": "packages/modules/Virtualization/zipfuse"
+    },
+    {
+      "path": "system/logging/rust"
+    },
+    {
+      "path": "system/security/diced"
+    },
+    {
+      "path": "system/security/keystore2"
+    },
+    {
+      "path": "system/security/keystore2/legacykeystore"
+    },
+    {
+      "path": "system/security/keystore2/selinux"
     }
   ],
   "presubmit": [
     {
-      "name": "ZipFuseTest"
-    },
-    {
-      "name": "apkdmverity.test"
-    },
-    {
-      "name": "authfs_device_test_src_lib"
-    },
-    {
-      "name": "doh_unit_test"
-    },
-    {
-      "name": "keystore2_test"
-    },
-    {
-      "name": "legacykeystore_test"
-    },
-    {
-      "name": "libapkverify.integration_test"
-    },
-    {
-      "name": "libapkverify.test"
-    },
-    {
-      "name": "libidsig.test"
-    },
-    {
-      "name": "microdroid_manager_test"
-    },
-    {
       "name": "once_cell_test_src_lib"
     },
     {
       "name": "once_cell_test_tests_it"
-    },
-    {
-      "name": "virtualizationservice_device_test"
     }
   ],
   "presubmit-rust": [
     {
-      "name": "ZipFuseTest"
-    },
-    {
-      "name": "apkdmverity.test"
-    },
-    {
-      "name": "authfs_device_test_src_lib"
-    },
-    {
-      "name": "doh_unit_test"
-    },
-    {
-      "name": "keystore2_test"
-    },
-    {
-      "name": "legacykeystore_test"
-    },
-    {
-      "name": "libapkverify.integration_test"
-    },
-    {
-      "name": "libapkverify.test"
-    },
-    {
-      "name": "libidsig.test"
-    },
-    {
-      "name": "microdroid_manager_test"
-    },
-    {
       "name": "once_cell_test_src_lib"
     },
     {
       "name": "once_cell_test_tests_it"
-    },
-    {
-      "name": "virtualizationservice_device_test"
     }
   ]
 }
diff --git a/cargo2android.json b/cargo2android.json
index 506d866..674f610 100644
--- a/cargo2android.json
+++ b/cargo2android.json
@@ -1,13 +1,9 @@
 {
-  "apex-available": [
-    "//apex_available:platform",
-    "com.android.compos",
-    "com.android.resolv",
-    "com.android.virt"
-  ],
   "dependencies": true,
   "device": true,
   "min-sdk-version": "29",
+  "patch": "patches/Android.bp.patch",
   "run": true,
-  "tests": true
-}
\ No newline at end of file
+  "tests": true,
+  "vendor-available": true
+}
diff --git a/examples/lazy_static.rs b/examples/lazy_static.rs
index f050560..3cdb19f 100644
--- a/examples/lazy_static.rs
+++ b/examples/lazy_static.rs
@@ -32,5 +32,5 @@
 
     // The same works for function-style:
     assert_eq!(hashmap().get(&0), Some(&"foo"));
-    assert_eq!(hashmap().get(&0), Some(&"bar"));
+    assert_eq!(hashmap().get(&1), Some(&"bar"));
 }
diff --git a/patches/Android.bp.patch b/patches/Android.bp.patch
new file mode 100644
index 0000000..6dee374
--- /dev/null
+++ b/patches/Android.bp.patch
@@ -0,0 +1,47 @@
+diff --git a/Android.bp b/Android.bp
+index d6878ec..59ae682 100644
+--- a/Android.bp
++++ b/Android.bp
+@@ -59,6 +59,26 @@ rust_library {
+     min_sdk_version: "29",
+ }
+ 
++rust_library_rlib {
++    name: "libonce_cell_nostd",
++    crate_name: "once_cell",
++    cargo_env_compat: true,
++    cargo_pkg_version: "1.16.0",
++    srcs: ["src/lib.rs"],
++    edition: "2021",
++    features: [
++        "alloc",
++        "default",
++        "race",
++    ],
++    apex_available: [
++        "//apex_available:platform",
++        "//apex_available:anyapex"
++    ],
++    vendor_available: true,
++    min_sdk_version: "29",
++}
++
+ rust_test {
+     name: "once_cell_test_src_lib",
+     host_supported: true,
+@@ -79,7 +99,6 @@ rust_test {
+         "std",
+     ],
+     rustlibs: [
+-        "libcritical_section",
+         "libcrossbeam_utils",
+         "liblazy_static",
+         "libregex",
+@@ -106,7 +125,6 @@ rust_test {
+         "std",
+     ],
+     rustlibs: [
+-        "libcritical_section",
+         "libcrossbeam_utils",
+         "liblazy_static",
+         "libonce_cell",
diff --git a/patches/imp_std.rs.patch b/patches/imp_std.rs.patch
index 556297f..4916e3a 100644
--- a/patches/imp_std.rs.patch
+++ b/patches/imp_std.rs.patch
@@ -1,8 +1,8 @@
 diff --git a/src/imp_std.rs b/src/imp_std.rs
-index d7dda96..f461c3d 100644
+index 5761f01..d727851 100644
 --- a/src/imp_std.rs
 +++ b/src/imp_std.rs
-@@ -299,6 +299,7 @@ mod tests {
+@@ -358,6 +358,7 @@ mod tests {
      }
  
      #[test]
@@ -10,7 +10,7 @@
      fn poison_bad() {
          static O: OnceCell<()> = OnceCell::new();
  
-@@ -320,6 +321,7 @@ mod tests {
+@@ -379,6 +380,7 @@ mod tests {
      }
  
      #[test]
diff --git a/patches/it.rs.patch b/patches/it.rs.patch
index 8491db5..4d1d23f 100644
--- a/patches/it.rs.patch
+++ b/patches/it.rs.patch
@@ -1,8 +1,8 @@
 diff --git a/tests/it.rs b/tests/it.rs
-index 81faaff..c769487 100644
+index d18f0a1..f94bc69 100644
 --- a/tests/it.rs
 +++ b/tests/it.rs
-@@ -166,6 +166,7 @@ mod unsync {
+@@ -208,6 +208,7 @@ mod unsync {
  
      #[test]
      #[cfg(feature = "std")]
@@ -10,7 +10,15 @@
      fn lazy_poisoning() {
          let x: Lazy<String> = Lazy::new(|| panic!("kaboom"));
          for _ in 0..2 {
-@@ -288,6 +289,7 @@ mod sync {
+@@ -227,6 +228,7 @@ mod unsync {
+ 
+     #[test]
+     #[should_panic(expected = "reentrant init")]
++    #[ignore = "Android: ignore for now. Need to compile these binaries separately."]
+     fn reentrant_init() {
+         let x: OnceCell<Box<i32>> = OnceCell::new();
+         let dangling_ref: Cell<Option<&i32>> = Cell::new(None);
+@@ -342,6 +344,7 @@ mod sync {
      }
  
      #[test]
@@ -18,15 +26,15 @@
      fn get_or_try_init() {
          let cell: OnceCell<String> = OnceCell::new();
          assert!(cell.get().is_none());
-@@ -348,6 +350,7 @@ mod sync {
- 
+@@ -441,6 +441,7 @@ mod sync {
      #[test]
      #[cfg_attr(miri, ignore)] // miri doesn't support processes
+     #[cfg(feature = "std")]
 +    #[ignore = "Android: ignore for now. Need to compile these binaries separately."]
      fn reentrant_init() {
          let examples_dir = {
              let mut exe = std::env::current_exe().unwrap();
-@@ -486,6 +489,7 @@ mod sync {
+@@ -590,6 +593,7 @@ mod sync {
      }
  
      #[test]
diff --git a/src/imp_cs.rs b/src/imp_cs.rs
new file mode 100644
index 0000000..668f18e
--- /dev/null
+++ b/src/imp_cs.rs
@@ -0,0 +1,78 @@
+use core::panic::{RefUnwindSafe, UnwindSafe};
+
+use atomic_polyfill::{AtomicBool, Ordering};
+use critical_section::{CriticalSection, Mutex};
+
+use crate::unsync;
+
+pub(crate) struct OnceCell<T> {
+    initialized: AtomicBool,
+    // Use `unsync::OnceCell` internally since `Mutex` does not provide
+    // interior mutability and to be able to re-use `get_or_try_init`.
+    value: Mutex<unsync::OnceCell<T>>,
+}
+
+// Why do we need `T: Send`?
+// Thread A creates a `OnceCell` and shares it with
+// scoped thread B, which fills the cell, which is
+// then destroyed by A. That is, destructor observes
+// a sent value.
+unsafe impl<T: Sync + Send> Sync for OnceCell<T> {}
+unsafe impl<T: Send> Send for OnceCell<T> {}
+
+impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceCell<T> {}
+impl<T: UnwindSafe> UnwindSafe for OnceCell<T> {}
+
+impl<T> OnceCell<T> {
+    pub(crate) const fn new() -> OnceCell<T> {
+        OnceCell { initialized: AtomicBool::new(false), value: Mutex::new(unsync::OnceCell::new()) }
+    }
+
+    pub(crate) const fn with_value(value: T) -> OnceCell<T> {
+        OnceCell {
+            initialized: AtomicBool::new(true),
+            value: Mutex::new(unsync::OnceCell::with_value(value)),
+        }
+    }
+
+    #[inline]
+    pub(crate) fn is_initialized(&self) -> bool {
+        self.initialized.load(Ordering::Acquire)
+    }
+
+    #[cold]
+    pub(crate) fn initialize<F, E>(&self, f: F) -> Result<(), E>
+    where
+        F: FnOnce() -> Result<T, E>,
+    {
+        critical_section::with(|cs| {
+            let cell = self.value.borrow(cs);
+            cell.get_or_try_init(f).map(|_| {
+                self.initialized.store(true, Ordering::Release);
+            })
+        })
+    }
+
+    /// Get the reference to the underlying value, without checking if the cell
+    /// is initialized.
+    ///
+    /// # Safety
+    ///
+    /// Caller must ensure that the cell is in initialized state, and that
+    /// the contents are acquired by (synchronized to) this thread.
+    pub(crate) unsafe fn get_unchecked(&self) -> &T {
+        debug_assert!(self.is_initialized());
+        // SAFETY: The caller ensures that the value is initialized and access synchronized.
+        crate::unwrap_unchecked(self.value.borrow(CriticalSection::new()).get())
+    }
+
+    #[inline]
+    pub(crate) fn get_mut(&mut self) -> Option<&mut T> {
+        self.value.get_mut().get_mut()
+    }
+
+    #[inline]
+    pub(crate) fn into_inner(self) -> Option<T> {
+        self.value.into_inner().into_inner()
+    }
+}
diff --git a/src/imp_pl.rs b/src/imp_pl.rs
index 6c9b0fe..84d8593 100644
--- a/src/imp_pl.rs
+++ b/src/imp_pl.rs
@@ -1,20 +1,18 @@
 use std::{
     cell::UnsafeCell,
-    hint,
     panic::{RefUnwindSafe, UnwindSafe},
-    sync::atomic::{AtomicBool, Ordering},
+    sync::atomic::{AtomicU8, Ordering},
 };
 
-use parking_lot::Mutex;
-
-use crate::take_unchecked;
-
 pub(crate) struct OnceCell<T> {
-    mutex: Mutex<()>,
-    is_initialized: AtomicBool,
+    state: AtomicU8,
     value: UnsafeCell<Option<T>>,
 }
 
+const INCOMPLETE: u8 = 0x0;
+const RUNNING: u8 = 0x1;
+const COMPLETE: u8 = 0x2;
+
 // Why do we need `T: Send`?
 // Thread A creates a `OnceCell` and shares it with
 // scoped thread B, which fills the cell, which is
@@ -28,17 +26,17 @@
 
 impl<T> OnceCell<T> {
     pub(crate) const fn new() -> OnceCell<T> {
-        OnceCell {
-            mutex: parking_lot::const_mutex(()),
-            is_initialized: AtomicBool::new(false),
-            value: UnsafeCell::new(None),
-        }
+        OnceCell { state: AtomicU8::new(INCOMPLETE), value: UnsafeCell::new(None) }
+    }
+
+    pub(crate) const fn with_value(value: T) -> OnceCell<T> {
+        OnceCell { state: AtomicU8::new(COMPLETE), value: UnsafeCell::new(Some(value)) }
     }
 
     /// Safety: synchronizes with store to value via Release/Acquire.
     #[inline]
     pub(crate) fn is_initialized(&self) -> bool {
-        self.is_initialized.load(Ordering::Acquire)
+        self.state.load(Ordering::Acquire) == COMPLETE
     }
 
     /// Safety: synchronizes with store to value via `is_initialized` or mutex
@@ -51,7 +49,7 @@
         let mut f = Some(f);
         let mut res: Result<(), E> = Ok(());
         let slot: *mut Option<T> = self.value.get();
-        initialize_inner(&self.mutex, &self.is_initialized, &mut || {
+        initialize_inner(&self.state, &mut || {
             // We are calling user-supplied function and need to be careful.
             // - if it returns Err, we unlock mutex and return without touching anything
             // - if it panics, we unlock mutex and propagate panic without touching anything
@@ -60,7 +58,7 @@
             //   but that is more complicated
             // - finally, if it returns Ok, we store the value and store the flag with
             //   `Release`, which synchronizes with `Acquire`s.
-            let f = unsafe { take_unchecked(&mut f) };
+            let f = unsafe { crate::unwrap_unchecked(f.take()) };
             match f() {
                 Ok(value) => unsafe {
                     // Safe b/c we have a unique access and no panic may happen
@@ -78,6 +76,21 @@
         res
     }
 
+    #[cold]
+    pub(crate) fn wait(&self) {
+        let key = &self.state as *const _ as usize;
+        unsafe {
+            parking_lot_core::park(
+                key,
+                || self.state.load(Ordering::Acquire) != COMPLETE,
+                || (),
+                |_, _| (),
+                parking_lot_core::DEFAULT_PARK_TOKEN,
+                None,
+            );
+        }
+    }
+
     /// Get the reference to the underlying value, without checking if the cell
     /// is initialized.
     ///
@@ -87,15 +100,8 @@
     /// the contents are acquired by (synchronized to) this thread.
     pub(crate) unsafe fn get_unchecked(&self) -> &T {
         debug_assert!(self.is_initialized());
-        let slot: &Option<T> = &*self.value.get();
-        match slot {
-            Some(value) => value,
-            // This unsafe does improve performance, see `examples/bench`.
-            None => {
-                debug_assert!(false);
-                hint::unreachable_unchecked()
-            }
-        }
+        let slot = &*self.value.get();
+        crate::unwrap_unchecked(slot.as_ref())
     }
 
     /// Gets the mutable reference to the underlying value.
@@ -113,14 +119,49 @@
     }
 }
 
+struct Guard<'a> {
+    state: &'a AtomicU8,
+    new_state: u8,
+}
+
+impl<'a> Drop for Guard<'a> {
+    fn drop(&mut self) {
+        self.state.store(self.new_state, Ordering::Release);
+        unsafe {
+            let key = self.state as *const AtomicU8 as usize;
+            parking_lot_core::unpark_all(key, parking_lot_core::DEFAULT_UNPARK_TOKEN);
+        }
+    }
+}
+
 // Note: this is intentionally monomorphic
 #[inline(never)]
-fn initialize_inner(mutex: &Mutex<()>, is_initialized: &AtomicBool, init: &mut dyn FnMut() -> bool) {
-    let _guard = mutex.lock();
-
-    if !is_initialized.load(Ordering::Acquire) {
-        if init() {
-            is_initialized.store(true, Ordering::Release);
+fn initialize_inner(state: &AtomicU8, init: &mut dyn FnMut() -> bool) {
+    loop {
+        let exchange =
+            state.compare_exchange_weak(INCOMPLETE, RUNNING, Ordering::Acquire, Ordering::Acquire);
+        match exchange {
+            Ok(_) => {
+                let mut guard = Guard { state, new_state: INCOMPLETE };
+                if init() {
+                    guard.new_state = COMPLETE;
+                }
+                return;
+            }
+            Err(COMPLETE) => return,
+            Err(RUNNING) => unsafe {
+                let key = state as *const AtomicU8 as usize;
+                parking_lot_core::park(
+                    key,
+                    || state.load(Ordering::Relaxed) == RUNNING,
+                    || (),
+                    |_, _| (),
+                    parking_lot_core::DEFAULT_PARK_TOKEN,
+                    None,
+                );
+            },
+            Err(INCOMPLETE) => (),
+            Err(_) => debug_assert!(false),
         }
     }
 }
@@ -129,5 +170,5 @@
 fn test_size() {
     use std::mem::size_of;
 
-    assert_eq!(size_of::<OnceCell<bool>>(), 2 * size_of::<bool>() + size_of::<u8>());
+    assert_eq!(size_of::<OnceCell<bool>>(), 1 * size_of::<bool>() + size_of::<u8>());
 }
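The rewritten `imp_pl.rs` above replaces the `Mutex<()>` + `AtomicBool` pair with a single `AtomicU8` three-state machine (`INCOMPLETE`/`RUNNING`/`COMPLETE`) plus `parking_lot_core` park/unpark; the `Guard` resets the state to `INCOMPLETE` when an initializer fails or panics, so the cell is never poisoned. A small sketch of that behaviour through the public API (illustrative only, not part of this change, and independent of which backend is compiled in):

```rust
// Sketch: a failed initializer leaves the cell empty and retryable,
// unlike std::sync::Once, which would poison it.
use once_cell::sync::OnceCell;

fn main() {
    let cell: OnceCell<u32> = OnceCell::new();

    // The initializer fails: the guard drops back to the "incomplete" state,
    // so the cell stays empty instead of becoming poisoned.
    let err: Result<&u32, &str> = cell.get_or_try_init(|| Err("not ready"));
    assert!(err.is_err());
    assert_eq!(cell.get(), None);

    // A later attempt succeeds and moves the state to "complete".
    let value = cell.get_or_try_init(|| Ok::<u32, &str>(92)).unwrap();
    assert_eq!(*value, 92);
}
```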
diff --git a/src/imp_std.rs b/src/imp_std.rs
index f461c3d..d727851 100644
--- a/src/imp_std.rs
+++ b/src/imp_std.rs
@@ -5,20 +5,23 @@
 
 use std::{
     cell::{Cell, UnsafeCell},
-    hint::unreachable_unchecked,
     marker::PhantomData,
     panic::{RefUnwindSafe, UnwindSafe},
-    sync::atomic::{AtomicBool, AtomicUsize, Ordering},
+    sync::atomic::{AtomicBool, AtomicPtr, Ordering},
     thread::{self, Thread},
 };
 
-use crate::take_unchecked;
-
 #[derive(Debug)]
 pub(crate) struct OnceCell<T> {
-    // This `state` word is actually an encoded version of just a pointer to a
-    // `Waiter`, so we add the `PhantomData` appropriately.
-    state_and_queue: AtomicUsize,
+    // This `queue` field is the core of the implementation. It encodes two
+    // pieces of information:
+    //
+    // * The current state of the cell (`INCOMPLETE`, `RUNNING`, `COMPLETE`)
+    // * Linked list of threads waiting for the current cell.
+    //
+    // State is encoded in two low bits. Only `INCOMPLETE` and `RUNNING` states
+    // allow waiters.
+    queue: AtomicPtr<Waiter>,
     _marker: PhantomData<*mut Waiter>,
     value: UnsafeCell<Option<T>>,
 }
@@ -34,41 +37,23 @@
 impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceCell<T> {}
 impl<T: UnwindSafe> UnwindSafe for OnceCell<T> {}
 
-// Three states that a OnceCell can be in, encoded into the lower bits of `state` in
-// the OnceCell structure.
-const INCOMPLETE: usize = 0x0;
-const RUNNING: usize = 0x1;
-const COMPLETE: usize = 0x2;
-
-// Mask to learn about the state. All other bits are the queue of waiters if
-// this is in the RUNNING state.
-const STATE_MASK: usize = 0x3;
-
-// Representation of a node in the linked list of waiters in the RUNNING state.
-#[repr(align(4))] // Ensure the two lower bits are free to use as state bits.
-struct Waiter {
-    thread: Cell<Option<Thread>>,
-    signaled: AtomicBool,
-    next: *const Waiter,
-}
-
-// Head of a linked list of waiters.
-// Every node is a struct on the stack of a waiting thread.
-// Will wake up the waiters when it gets dropped, i.e. also on panic.
-struct WaiterQueue<'a> {
-    state_and_queue: &'a AtomicUsize,
-    set_state_on_drop_to: usize,
-}
-
 impl<T> OnceCell<T> {
     pub(crate) const fn new() -> OnceCell<T> {
         OnceCell {
-            state_and_queue: AtomicUsize::new(INCOMPLETE),
+            queue: AtomicPtr::new(INCOMPLETE_PTR),
             _marker: PhantomData,
             value: UnsafeCell::new(None),
         }
     }
 
+    pub(crate) const fn with_value(value: T) -> OnceCell<T> {
+        OnceCell {
+            queue: AtomicPtr::new(COMPLETE_PTR),
+            _marker: PhantomData,
+            value: UnsafeCell::new(Some(value)),
+        }
+    }
+
     /// Safety: synchronizes with store to value via Release/(Acquire|SeqCst).
     #[inline]
     pub(crate) fn is_initialized(&self) -> bool {
@@ -76,7 +61,7 @@
         // operations visible to us, and, this being a fast path, weaker
         // ordering helps with performance. This `Acquire` synchronizes with
         // `SeqCst` operations on the slow path.
-        self.state_and_queue.load(Ordering::Acquire) == COMPLETE
+        self.queue.load(Ordering::Acquire) == COMPLETE_PTR
     }
 
     /// Safety: synchronizes with store to value via SeqCst read from state,
@@ -90,22 +75,30 @@
         let mut f = Some(f);
         let mut res: Result<(), E> = Ok(());
         let slot: *mut Option<T> = self.value.get();
-        initialize_inner(&self.state_and_queue, &mut || {
-            let f = unsafe { take_unchecked(&mut f) };
-            match f() {
-                Ok(value) => {
-                    unsafe { *slot = Some(value) };
-                    true
+        initialize_or_wait(
+            &self.queue,
+            Some(&mut || {
+                let f = unsafe { crate::unwrap_unchecked(f.take()) };
+                match f() {
+                    Ok(value) => {
+                        unsafe { *slot = Some(value) };
+                        true
+                    }
+                    Err(err) => {
+                        res = Err(err);
+                        false
+                    }
                 }
-                Err(err) => {
-                    res = Err(err);
-                    false
-                }
-            }
-        });
+            }),
+        );
         res
     }
 
+    #[cold]
+    pub(crate) fn wait(&self) {
+        initialize_or_wait(&self.queue, None);
+    }
+
     /// Get the reference to the underlying value, without checking if the cell
     /// is initialized.
     ///
@@ -115,15 +108,8 @@
     /// the contents are acquired by (synchronized to) this thread.
     pub(crate) unsafe fn get_unchecked(&self) -> &T {
         debug_assert!(self.is_initialized());
-        let slot: &Option<T> = &*self.value.get();
-        match slot {
-            Some(value) => value,
-            // This unsafe does improve performance, see `examples/bench`.
-            None => {
-                debug_assert!(false);
-                unreachable_unchecked()
-            }
-        }
+        let slot = &*self.value.get();
+        crate::unwrap_unchecked(slot.as_ref())
     }
 
     /// Gets the mutable reference to the underlying value.
@@ -144,67 +130,114 @@
     }
 }
 
-// Corresponds to `std::sync::Once::call_inner`
-// Note: this is intentionally monomorphic
-#[inline(never)]
-fn initialize_inner(my_state_and_queue: &AtomicUsize, init: &mut dyn FnMut() -> bool) -> bool {
-    let mut state_and_queue = my_state_and_queue.load(Ordering::Acquire);
+// Three states that a OnceCell can be in, encoded into the lower bits of `queue` in
+// the OnceCell structure.
+const INCOMPLETE: usize = 0x0;
+const RUNNING: usize = 0x1;
+const COMPLETE: usize = 0x2;
+const INCOMPLETE_PTR: *mut Waiter = INCOMPLETE as *mut Waiter;
+const COMPLETE_PTR: *mut Waiter = COMPLETE as *mut Waiter;
 
-    loop {
-        match state_and_queue {
-            COMPLETE => return true,
-            INCOMPLETE => {
-                let exchange = my_state_and_queue.compare_exchange(
-                    state_and_queue,
-                    RUNNING,
-                    Ordering::Acquire,
-                    Ordering::Acquire,
-                );
-                if let Err(old) = exchange {
-                    state_and_queue = old;
-                    continue;
-                }
-                let mut waiter_queue = WaiterQueue {
-                    state_and_queue: my_state_and_queue,
-                    set_state_on_drop_to: INCOMPLETE, // Difference, std uses `POISONED`
-                };
-                let success = init();
+// Mask to learn about the state. All other bits are the queue of waiters if
+// this is in the RUNNING state.
+const STATE_MASK: usize = 0x3;
 
-                // Difference, std always uses `COMPLETE`
-                waiter_queue.set_state_on_drop_to = if success { COMPLETE } else { INCOMPLETE };
-                return success;
-            }
-            _ => {
-                assert!(state_and_queue & STATE_MASK == RUNNING);
-                wait(&my_state_and_queue, state_and_queue);
-                state_and_queue = my_state_and_queue.load(Ordering::Acquire);
+/// Representation of a node in the linked list of waiters in the RUNNING state.
+/// A waiter is stored on the stack of its waiting thread.
+#[repr(align(4))] // Ensure the two lower bits are free to use as state bits.
+struct Waiter {
+    thread: Cell<Option<Thread>>,
+    signaled: AtomicBool,
+    next: *mut Waiter,
+}
+
+/// Drains and notifies the queue of waiters on drop.
+struct Guard<'a> {
+    queue: &'a AtomicPtr<Waiter>,
+    new_queue: *mut Waiter,
+}
+
+impl Drop for Guard<'_> {
+    fn drop(&mut self) {
+        let queue = self.queue.swap(self.new_queue, Ordering::AcqRel);
+
+        let state = strict::addr(queue) & STATE_MASK;
+        assert_eq!(state, RUNNING);
+
+        unsafe {
+            let mut waiter = strict::map_addr(queue, |q| q & !STATE_MASK);
+            while !waiter.is_null() {
+                let next = (*waiter).next;
+                let thread = (*waiter).thread.take().unwrap();
+                (*waiter).signaled.store(true, Ordering::Release);
+                waiter = next;
+                thread.unpark();
             }
         }
     }
 }
 
-// Copy-pasted from std exactly.
-fn wait(state_and_queue: &AtomicUsize, mut current_state: usize) {
-    loop {
-        if current_state & STATE_MASK != RUNNING {
-            return;
-        }
+// Corresponds to `std::sync::Once::call_inner`.
+//
+// Originally copied from std, but since modified to remove poisoning and to
+// support wait.
+//
+// Note: this is intentionally monomorphic
+#[inline(never)]
+fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut() -> bool>) {
+    let mut curr_queue = queue.load(Ordering::Acquire);
 
+    loop {
+        let curr_state = strict::addr(curr_queue) & STATE_MASK;
+        match (curr_state, &mut init) {
+            (COMPLETE, _) => return,
+            (INCOMPLETE, Some(init)) => {
+                let exchange = queue.compare_exchange(
+                    curr_queue,
+                    strict::map_addr(curr_queue, |q| (q & !STATE_MASK) | RUNNING),
+                    Ordering::Acquire,
+                    Ordering::Acquire,
+                );
+                if let Err(new_queue) = exchange {
+                    curr_queue = new_queue;
+                    continue;
+                }
+                let mut guard = Guard { queue, new_queue: INCOMPLETE_PTR };
+                if init() {
+                    guard.new_queue = COMPLETE_PTR;
+                }
+                return;
+            }
+            (INCOMPLETE, None) | (RUNNING, _) => {
+                wait(&queue, curr_queue);
+                curr_queue = queue.load(Ordering::Acquire);
+            }
+            _ => debug_assert!(false),
+        }
+    }
+}
+
+fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
+    let curr_state = strict::addr(curr_queue) & STATE_MASK;
+    loop {
         let node = Waiter {
             thread: Cell::new(Some(thread::current())),
             signaled: AtomicBool::new(false),
-            next: (current_state & !STATE_MASK) as *const Waiter,
+            next: strict::map_addr(curr_queue, |q| q & !STATE_MASK),
         };
-        let me = &node as *const Waiter as usize;
+        let me = &node as *const Waiter as *mut Waiter;
 
-        let exchange = state_and_queue.compare_exchange(
-            current_state,
-            me | RUNNING,
+        let exchange = queue.compare_exchange(
+            curr_queue,
+            strict::map_addr(me, |q| q | curr_state),
             Ordering::Release,
             Ordering::Relaxed,
         );
-        if let Err(old) = exchange {
-            current_state = old;
+        if let Err(new_queue) = exchange {
+            if strict::addr(new_queue) & STATE_MASK != curr_state {
+                return;
+            }
+            curr_queue = new_queue;
             continue;
         }
 
@@ -215,24 +248,51 @@
     }
 }
 
-// Copy-pasted from std exactly.
-impl Drop for WaiterQueue<'_> {
-    fn drop(&mut self) {
-        let state_and_queue =
-            self.state_and_queue.swap(self.set_state_on_drop_to, Ordering::AcqRel);
+// Polyfill of strict provenance from https://crates.io/crates/sptr.
+//
+// Use free-standing functions rather than a trait to keep things simple and
+// avoid any potential conflicts with a future stable std API.
+mod strict {
+    #[must_use]
+    #[inline]
+    pub(crate) fn addr<T>(ptr: *mut T) -> usize
+    where
+        T: Sized,
+    {
+        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
+        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
+        // provenance).
+        unsafe { core::mem::transmute(ptr) }
+    }
 
-        assert_eq!(state_and_queue & STATE_MASK, RUNNING);
+    #[must_use]
+    #[inline]
+    pub(crate) fn with_addr<T>(ptr: *mut T, addr: usize) -> *mut T
+    where
+        T: Sized,
+    {
+        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
+        //
+        // In the mean-time, this operation is defined to be "as if" it was
+        // a wrapping_offset, so we can emulate it as such. This should properly
+        // restore pointer provenance even under today's compiler.
+        let self_addr = self::addr(ptr) as isize;
+        let dest_addr = addr as isize;
+        let offset = dest_addr.wrapping_sub(self_addr);
 
-        unsafe {
-            let mut queue = (state_and_queue & !STATE_MASK) as *const Waiter;
-            while !queue.is_null() {
-                let next = (*queue).next;
-                let thread = (*queue).thread.replace(None).unwrap();
-                (*queue).signaled.store(true, Ordering::Release);
-                queue = next;
-                thread.unpark();
-            }
-        }
+        // This is the canonical desugaring of this operation,
+        // but `pointer::cast` was only stabilized in 1.38.
+        // self.cast::<u8>().wrapping_offset(offset).cast::<T>()
+        (ptr as *mut u8).wrapping_offset(offset) as *mut T
+    }
+
+    #[must_use]
+    #[inline]
+    pub(crate) fn map_addr<T>(ptr: *mut T, f: impl FnOnce(usize) -> usize) -> *mut T
+    where
+        T: Sized,
+    {
+        self::with_addr(ptr, f(addr(ptr)))
     }
 }
 
@@ -262,7 +322,6 @@
     }
 
     #[test]
-    #[cfg(not(miri))]
     fn stampede_once() {
         static O: OnceCell<()> = OnceCell::new();
         static mut RUN: bool = false;
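The `strict` module added above polyfills `addr`/`with_addr`/`map_addr` so that the two low state bits can be packed into the 4-aligned `Waiter` pointer without a bare integer-to-pointer cast. A standalone sketch of that tagging pattern (illustrative only; `addr` and `map_addr` here are local stand-ins, not the crate's API):

```rust
// Sketch of the pointer-tagging pattern enabled by the strict-provenance polyfill.
const STATE_MASK: usize = 0b11;
const RUNNING: usize = 0b01;

fn addr<T>(ptr: *mut T) -> usize {
    ptr as usize // stand-in for strict::addr
}

fn map_addr<T>(ptr: *mut T, f: impl FnOnce(usize) -> usize) -> *mut T {
    // Recreate the target address as a wrapping offset, keeping provenance.
    let offset = (f(addr(ptr)) as isize).wrapping_sub(addr(ptr) as isize);
    (ptr as *mut u8).wrapping_offset(offset) as *mut T
}

fn main() {
    #[repr(align(4))] // alignment guarantees the two low address bits are free
    struct Waiter(u8);

    let mut node = Waiter(0);
    let raw: *mut Waiter = &mut node;

    let tagged = map_addr(raw, |a| a | RUNNING);    // pack state into low bits
    assert_eq!(addr(tagged) & STATE_MASK, RUNNING); // read the state back
    let untagged = map_addr(tagged, |a| a & !STATE_MASK);
    assert_eq!(untagged, raw);                      // original pointer restored
}
```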
diff --git a/src/lib.rs b/src/lib.rs
index cb8bfc7..c2061f8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -208,7 +208,6 @@
 //! ```
 //! use once_cell::sync::OnceCell;
 //!
-//! #[derive(Debug)]
 //! pub struct LateInit<T> { cell: OnceCell<T> }
 //!
 //! impl<T> LateInit<T> {
@@ -228,22 +227,24 @@
 //!     }
 //! }
 //!
-//! #[derive(Default, Debug)]
+//! #[derive(Default)]
 //! struct A<'a> {
 //!     b: LateInit<&'a B<'a>>,
 //! }
 //!
-//! #[derive(Default, Debug)]
+//! #[derive(Default)]
 //! struct B<'a> {
 //!     a: LateInit<&'a A<'a>>
 //! }
 //!
+//!
 //! fn build_cycle() {
 //!     let a = A::default();
 //!     let b = B::default();
 //!     a.b.init(&b);
 //!     b.a.init(&a);
-//!     println!("{:?}", a.b.a.b.a);
+//!     
+//!     let _a = &a.b.a.b.a;
 //! }
 //! ```
 //!
@@ -267,9 +268,9 @@
 //!
 //! # Minimum Supported `rustc` Version
 //!
-//! This crate's minimum supported `rustc` version is `1.36.0`.
+//! This crate's minimum supported `rustc` version is `1.56.0`.
 //!
-//! If only the `std` feature is enabled, MSRV will be updated conservatively.
+//! If only the `std` feature is enabled, the MSRV will be updated conservatively, supporting at least the latest 8 versions of the compiler.
 //! When using other features, like `parking_lot`, MSRV might be updated more frequently, up to the latest stable.
 //! In both cases, increasing MSRV is *not* considered a semver-breaking change.
 //!
@@ -311,6 +312,14 @@
 //! At the moment, `unsync` has an additional benefit that reentrant initialization causes a panic, which
 //! might be easier to debug than a deadlock.
 //!
+//! **Does this crate support async?**
+//!
+//! No, but you can use [`async_once_cell`](https://crates.io/crates/async_once_cell) instead.
+//!
+//! **Can I bring my own mutex?**
+//!
+//! There is [generic_once_cell](https://crates.io/crates/generic_once_cell) to allow just that.
+//!
 //! # Related crates
 //!
 //! * [double-checked-cell](https://github.com/niklasf/double-checked-cell)
@@ -318,6 +327,8 @@
 //! * [lazycell](https://crates.io/crates/lazycell)
 //! * [mitochondria](https://crates.io/crates/mitochondria)
 //! * [lazy_static](https://crates.io/crates/lazy_static)
+//! * [async_once_cell](https://crates.io/crates/async_once_cell)
+//! * [generic_once_cell](https://crates.io/crates/generic_once_cell) (bring your own mutex)
 //!
 //! Most of this crate's functionality is available in `std` in nightly Rust.
 //! See the [tracking issue](https://github.com/rust-lang/rust/issues/74465).
@@ -327,13 +338,15 @@
 #[cfg(feature = "alloc")]
 extern crate alloc;
 
-#[cfg(feature = "std")]
-#[cfg(feature = "parking_lot")]
+#[cfg(all(feature = "critical-section", not(feature = "std")))]
+#[path = "imp_cs.rs"]
+mod imp;
+
+#[cfg(all(feature = "std", feature = "parking_lot"))]
 #[path = "imp_pl.rs"]
 mod imp;
 
-#[cfg(feature = "std")]
-#[cfg(not(feature = "parking_lot"))]
+#[cfg(all(feature = "std", not(feature = "parking_lot")))]
 #[path = "imp_std.rs"]
 mod imp;
 
@@ -341,12 +354,12 @@
 pub mod unsync {
     use core::{
         cell::{Cell, UnsafeCell},
-        fmt, hint, mem,
+        fmt, mem,
         ops::{Deref, DerefMut},
+        panic::{RefUnwindSafe, UnwindSafe},
     };
 
-    #[cfg(feature = "std")]
-    use std::panic::{RefUnwindSafe, UnwindSafe};
+    use super::unwrap_unchecked;
 
     /// A cell which can be written to only once. It is not thread safe.
     ///
@@ -377,9 +390,7 @@
     // `&unsync::OnceCell` to sneak a `T` through `catch_unwind`,
     // by initializing the cell in closure and extracting the value in the
     // `Drop`.
-    #[cfg(feature = "std")]
     impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceCell<T> {}
-    #[cfg(feature = "std")]
     impl<T: UnwindSafe> UnwindSafe for OnceCell<T> {}
 
     impl<T> Default for OnceCell<T> {
@@ -399,14 +410,17 @@
 
     impl<T: Clone> Clone for OnceCell<T> {
         fn clone(&self) -> OnceCell<T> {
-            let res = OnceCell::new();
-            if let Some(value) = self.get() {
-                match res.set(value.clone()) {
-                    Ok(()) => (),
-                    Err(_) => unreachable!(),
-                }
+            match self.get() {
+                Some(value) => OnceCell::with_value(value.clone()),
+                None => OnceCell::new(),
             }
-            res
+        }
+
+        fn clone_from(&mut self, source: &Self) {
+            match (self.get_mut(), source.get()) {
+                (Some(this), Some(source)) => this.clone_from(source),
+                _ => *self = source.clone(),
+            }
         }
     }
 
@@ -420,7 +434,7 @@
 
     impl<T> From<T> for OnceCell<T> {
         fn from(value: T) -> Self {
-            OnceCell { inner: UnsafeCell::new(Some(value)) }
+            OnceCell::with_value(value)
         }
     }
 
@@ -430,11 +444,20 @@
             OnceCell { inner: UnsafeCell::new(None) }
         }
 
+        /// Creates a new initialized cell.
+        pub const fn with_value(value: T) -> OnceCell<T> {
+            OnceCell { inner: UnsafeCell::new(Some(value)) }
+        }
+
         /// Gets a reference to the underlying value.
         ///
         /// Returns `None` if the cell is empty.
+        #[inline]
         pub fn get(&self) -> Option<&T> {
-            // Safe due to `inner`'s invariant
+            // Safe due to `inner`'s invariant of being written to at most once.
+            // Had multiple writes to `inner` been allowed, a reference to the
+            // value we return now would become dangling by a write of a
+            // different value later.
             unsafe { &*self.inner.get() }.as_ref()
         }
 
@@ -451,8 +474,10 @@
         ///
         /// let mut cell: OnceCell<u32> = OnceCell::new();
         /// cell.set(92).unwrap();
-        /// cell = OnceCell::new();
+        /// *cell.get_mut().unwrap() = 93;
+        /// assert_eq!(cell.get(), Some(&93));
         /// ```
+        #[inline]
         pub fn get_mut(&mut self) -> Option<&mut T> {
             // Safe because we have unique access
             unsafe { &mut *self.inner.get() }.as_mut()
@@ -482,7 +507,7 @@
             }
         }
 
-        /// Like [`set`](Self::set), but also returns a referce to the final cell value.
+        /// Like [`set`](Self::set), but also returns a reference to the final cell value.
         ///
         /// # Example
         /// ```
@@ -500,16 +525,14 @@
             if let Some(old) = self.get() {
                 return Err((old, value));
             }
+
             let slot = unsafe { &mut *self.inner.get() };
             // This is the only place where we set the slot, no races
             // due to reentrancy/concurrency are possible, and we've
             // checked that slot is currently `None`, so this write
             // maintains the `inner`'s invariant.
             *slot = Some(value);
-            Ok(match &*slot {
-                Some(value) => value,
-                None => unsafe { hint::unreachable_unchecked() },
-            })
+            Ok(unsafe { unwrap_unchecked(slot.as_ref()) })
         }
 
         /// Gets the contents of the cell, initializing it with `f`
@@ -582,7 +605,7 @@
             // `assert`, while keeping `set/get` would be sound, but it seems
             // better to panic, rather than to silently use an old value.
             assert!(self.set(val).is_ok(), "reentrant init");
-            Ok(self.get().unwrap())
+            Ok(unsafe { unwrap_unchecked(self.get()) })
         }
 
         /// Takes the value out of this `OnceCell`, moving it back to an uninitialized state.
@@ -666,7 +689,6 @@
         init: Cell<Option<F>>,
     }
 
-    #[cfg(feature = "std")]
     impl<T, F: RefUnwindSafe> RefUnwindSafe for Lazy<T, F> where OnceCell<T>: RefUnwindSafe {}
 
     impl<T: fmt::Debug, F> fmt::Debug for Lazy<T, F> {
@@ -727,6 +749,59 @@
                 None => panic!("Lazy instance has previously been poisoned"),
             })
         }
+
+        /// Forces the evaluation of this lazy value and returns a mutable reference to
+        /// the result.
+        ///
+        /// This is equivalent to the `DerefMut` impl, but is explicit.
+        ///
+        /// # Example
+        /// ```
+        /// use once_cell::unsync::Lazy;
+        ///
+        /// let mut lazy = Lazy::new(|| 92);
+        ///
+        /// assert_eq!(Lazy::force_mut(&mut lazy), &92);
+        /// assert_eq!(*lazy, 92);
+        /// ```
+        pub fn force_mut(this: &mut Lazy<T, F>) -> &mut T {
+            Self::force(this);
+            Self::get_mut(this).unwrap_or_else(|| unreachable!())
+        }
+
+        /// Gets the reference to the result of this lazy value if
+        /// it was initialized, otherwise returns `None`.
+        ///
+        /// # Example
+        /// ```
+        /// use once_cell::unsync::Lazy;
+        ///
+        /// let lazy = Lazy::new(|| 92);
+        ///
+        /// assert_eq!(Lazy::get(&lazy), None);
+        /// assert_eq!(&*lazy, &92);
+        /// assert_eq!(Lazy::get(&lazy), Some(&92));
+        /// ```
+        pub fn get(this: &Lazy<T, F>) -> Option<&T> {
+            this.cell.get()
+        }
+
+        /// Gets the mutable reference to the result of this lazy value if
+        /// it was initialized, otherwise returns `None`.
+        ///
+        /// # Example
+        /// ```
+        /// use once_cell::unsync::Lazy;
+        ///
+        /// let mut lazy = Lazy::new(|| 92);
+        ///
+        /// assert_eq!(Lazy::get_mut(&mut lazy), None);
+        /// assert_eq!(*lazy, 92);
+        /// assert_eq!(Lazy::get_mut(&mut lazy), Some(&mut 92));
+        /// ```
+        pub fn get_mut(this: &mut Lazy<T, F>) -> Option<&mut T> {
+            this.cell.get_mut()
+        }
     }
 
     impl<T, F: FnOnce() -> T> Deref for Lazy<T, F> {
@@ -752,16 +827,16 @@
 }
 
 /// Thread-safe, blocking version of `OnceCell`.
-#[cfg(feature = "std")]
+#[cfg(any(feature = "std", feature = "critical-section"))]
 pub mod sync {
-    use std::{
+    use core::{
         cell::Cell,
         fmt, mem,
         ops::{Deref, DerefMut},
         panic::RefUnwindSafe,
     };
 
-    use crate::{imp::OnceCell as Imp, take_unchecked};
+    use super::{imp::OnceCell as Imp, unwrap_unchecked};
 
     /// A thread-safe cell which can be written to only once.
     ///
@@ -810,22 +885,23 @@
 
     impl<T: Clone> Clone for OnceCell<T> {
         fn clone(&self) -> OnceCell<T> {
-            let res = OnceCell::new();
-            if let Some(value) = self.get() {
-                match res.set(value.clone()) {
-                    Ok(()) => (),
-                    Err(_) => unreachable!(),
-                }
+            match self.get() {
+                Some(value) => Self::with_value(value.clone()),
+                None => Self::new(),
             }
-            res
+        }
+
+        fn clone_from(&mut self, source: &Self) {
+            match (self.get_mut(), source.get()) {
+                (Some(this), Some(source)) => this.clone_from(source),
+                _ => *self = source.clone(),
+            }
         }
     }
 
     impl<T> From<T> for OnceCell<T> {
         fn from(value: T) -> Self {
-            let cell = Self::new();
-            cell.get_or_init(|| value);
-            cell
+            Self::with_value(value)
         }
     }
 
@@ -843,6 +919,11 @@
             OnceCell(Imp::new())
         }
 
+        /// Creates a new initialized cell.
+        pub const fn with_value(value: T) -> OnceCell<T> {
+            OnceCell(Imp::with_value(value))
+        }
+
         /// Gets the reference to the underlying value.
         ///
         /// Returns `None` if the cell is empty, or being initialized. This
@@ -856,6 +937,37 @@
             }
         }
 
+        /// Gets the reference to the underlying value, blocking the current
+        /// thread until it is set.
+        ///
+        /// ```
+        /// use once_cell::sync::OnceCell;
+        ///
+        /// let mut cell = std::sync::Arc::new(OnceCell::new());
+        /// let t = std::thread::spawn({
+        ///     let cell = std::sync::Arc::clone(&cell);
+        ///     move || cell.set(92).unwrap()
+        /// });
+        ///
+        /// // Returns immediately, but might return None.
+        /// let _value_or_none = cell.get();
+        ///
+        /// // Will return 92, but might block until the other thread does `.set`.
+        /// let value: &u32 = cell.wait();
+        /// assert_eq!(*value, 92);
+        /// t.join().unwrap();
+        /// ```
+        #[cfg(feature = "std")]
+        pub fn wait(&self) -> &T {
+            if !self.0.is_initialized() {
+                self.0.wait()
+            }
+            debug_assert!(self.0.is_initialized());
+            // Safe b/c of the wait call above and the fact that we didn't
+            // relinquish our borrow.
+            unsafe { self.get_unchecked() }
+        }
+
         /// Gets the mutable reference to the underlying value.
         ///
         /// Returns `None` if the cell is empty.
@@ -871,6 +983,7 @@
         /// cell.set(92).unwrap();
         /// cell = OnceCell::new();
         /// ```
+        #[inline]
         pub fn get_mut(&mut self) -> Option<&mut T> {
             self.0.get_mut()
         }
@@ -882,6 +995,7 @@
         ///
         /// Caller must ensure that the cell is in initialized state, and that
         /// the contents are acquired by (synchronized to) this thread.
+        #[inline]
         pub unsafe fn get_unchecked(&self) -> &T {
             self.0.get_unchecked()
         }
@@ -933,7 +1047,7 @@
         /// ```
         pub fn try_insert(&self, value: T) -> Result<&T, (&T, T)> {
             let mut value = Some(value);
-            let res = self.get_or_init(|| unsafe { take_unchecked(&mut value) });
+            let res = self.get_or_init(|| unsafe { unwrap_unchecked(value.take()) });
             match value {
                 None => Ok(res),
                 Some(value) => Err((res, value)),
@@ -1011,6 +1125,7 @@
             if let Some(value) = self.get() {
                 return Ok(value);
             }
+
             self.0.initialize(f)?;
 
             // Safe b/c value is initialized.
@@ -1066,6 +1181,7 @@
         /// cell.set("hello".to_string()).unwrap();
         /// assert_eq!(cell.into_inner(), Some("hello".to_string()));
         /// ```
+        #[inline]
         pub fn into_inner(self) -> Option<T> {
             self.0.into_inner()
         }
@@ -1116,13 +1232,12 @@
     }
 
     // We never create a `&F` from a `&Lazy<T, F>` so it is fine to not impl
-    // `Sync` for `F`. we do create a `&mut Option<F>` in `force`, but this is
+    // `Sync` for `F`. We do create a `&mut Option<F>` in `force`, but this is
     // properly synchronized, so it only happens once so it also does not
     // contribute to this impl.
     unsafe impl<T, F: Send> Sync for Lazy<T, F> where OnceCell<T>: Sync {}
     // auto-derived `Send` impl is OK.
 
-    #[cfg(feature = "std")]
     impl<T, F: RefUnwindSafe> RefUnwindSafe for Lazy<T, F> where OnceCell<T>: RefUnwindSafe {}
 
     impl<T, F> Lazy<T, F> {
@@ -1164,6 +1279,57 @@
                 None => panic!("Lazy instance has previously been poisoned"),
             })
         }
+
+        /// Forces the evaluation of this lazy value and
+        /// returns a mutable reference to the result. This is equivalent
+        /// to the `DerefMut` impl, but is explicit.
+        ///
+        /// # Example
+        /// ```
+        /// use once_cell::sync::Lazy;
+        ///
+        /// let mut lazy = Lazy::new(|| 92);
+        ///
+        /// assert_eq!(Lazy::force_mut(&mut lazy), &mut 92);
+        /// ```
+        pub fn force_mut(this: &mut Lazy<T, F>) -> &mut T {
+            Self::force(this);
+            Self::get_mut(this).unwrap_or_else(|| unreachable!())
+        }
+
+        /// Gets the reference to the result of this lazy value if
+        /// it was initialized, otherwise returns `None`.
+        ///
+        /// # Example
+        /// ```
+        /// use once_cell::sync::Lazy;
+        ///
+        /// let lazy = Lazy::new(|| 92);
+        ///
+        /// assert_eq!(Lazy::get(&lazy), None);
+        /// assert_eq!(&*lazy, &92);
+        /// assert_eq!(Lazy::get(&lazy), Some(&92));
+        /// ```
+        pub fn get(this: &Lazy<T, F>) -> Option<&T> {
+            this.cell.get()
+        }
+
+        /// Gets the mutable reference to the result of this lazy value if
+        /// it was initialized, otherwise returns `None`.
+        ///
+        /// # Example
+        /// ```
+        /// use once_cell::sync::Lazy;
+        ///
+        /// let mut lazy = Lazy::new(|| 92);
+        ///
+        /// assert_eq!(Lazy::get_mut(&mut lazy), None);
+        /// assert_eq!(&*lazy, &92);
+        /// assert_eq!(Lazy::get_mut(&mut lazy), Some(&mut 92));
+        /// ```
+        pub fn get_mut(this: &mut Lazy<T, F>) -> Option<&mut T> {
+            this.cell.get_mut()
+        }
     }
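Taken together, the new `force_mut`, `get`, and `get_mut` accessors let a caller initialize and then adjust a `Lazy` value while it is still exclusively owned, before any shared reads happen. A minimal sketch of that pattern (illustrative values only):

```rust
use once_cell::sync::Lazy;

fn main() {
    let mut settings: Lazy<Vec<u32>> = Lazy::new(|| vec![1, 2, 3]);

    // Nothing has been computed yet.
    assert!(Lazy::get(&settings).is_none());

    // Force initialization and tweak the value through the mutable reference.
    Lazy::force_mut(&mut settings).push(4);

    // Later reads (here through `Deref`) observe the adjusted value.
    assert_eq!(*settings, vec![1, 2, 3, 4]);
    assert_eq!(Lazy::get_mut(&mut settings).map(|v| v.len()), Some(4));
}
```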
 
     impl<T, F: FnOnce() -> T> Deref for Lazy<T, F> {
@@ -1208,13 +1374,14 @@
 #[cfg(feature = "race")]
 pub mod race;
 
-#[cfg(feature = "std")]
-unsafe fn take_unchecked<T>(val: &mut Option<T>) -> T {
-    match val.take() {
-        Some(it) => it,
+// Remove once MSRV is at least 1.58.
+#[inline]
+unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
+    match val {
+        Some(value) => value,
         None => {
             debug_assert!(false);
-            std::hint::unreachable_unchecked()
+            core::hint::unreachable_unchecked()
         }
     }
 }
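For context (not part of the diff): this private helper mirrors `Option::unwrap_unchecked`, which the standard library stabilized in Rust 1.58, hence the TODO about the MSRV. A tiny sketch of the standard-library call that would eventually replace it:

```rust
fn main() {
    let value: Option<u32> = Some(92);

    // SAFETY: `value` is known to be `Some` at this point.
    let inner = unsafe { value.unwrap_unchecked() };
    assert_eq!(inner, 92);
}
```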
diff --git a/src/race.rs b/src/race.rs
index 3576420..ee3d51a 100644
--- a/src/race.rs
+++ b/src/race.rs
@@ -5,14 +5,30 @@
 //! them stores the result.
 //!
 //! This module does not require `std` feature.
+//!
+//! # Atomic orderings
+//!
+//! All types in this module use `Acquire` and `Release`
+//! [atomic orderings](Ordering) for all their operations. While this is not
+//! strictly necessary for types other than `OnceBox`, it is useful for users:
+//! once `get` or `get_or_init` returns a value on one thread, that thread is
+//! also guaranteed to observe any side effects the setter thread performed
+//! before calling `set` or `get_or_init`. With `Relaxed` orderings, those side
+//! effects could still appear to be pending from the getter thread's
+//! perspective. This is an acceptable trade-off, since `Acquire` and `Release`
+//! have very little performance overhead over `Relaxed` on most
+//! architectures.
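A sketch (not taken from the library docs) of the visibility guarantee described above: a `Relaxed` write performed before `set` is visible to any thread that observes the value through `get`. The example uses `std` threads for brevity, even though this module does not require `std`.

```rust
use core::num::NonZeroUsize;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

use once_cell::race::OnceNonZeroUsize;

static SIDE_EFFECT: AtomicUsize = AtomicUsize::new(0);
static CELL: OnceNonZeroUsize = OnceNonZeroUsize::new();

fn main() {
    let setter = std::thread::spawn(|| {
        SIDE_EFFECT.store(1, Relaxed); // happens before the `Release` store inside `set`
        CELL.set(NonZeroUsize::new(92).unwrap()).unwrap();
    });

    // `get` uses `Acquire`; if it observes the value, the earlier `Relaxed`
    // store is guaranteed to be visible as well.
    if let Some(value) = CELL.get() {
        assert_eq!(value.get(), 92);
        assert_eq!(SIDE_EFFECT.load(Relaxed), 1);
    }

    setter.join().unwrap();
}
```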
 
-#[cfg(feature = "atomic-polyfill")]
+#[cfg(feature = "critical-section")]
 use atomic_polyfill as atomic;
-#[cfg(not(feature = "atomic-polyfill"))]
+#[cfg(not(feature = "critical-section"))]
 use core::sync::atomic;
 
-use atomic::{AtomicUsize, Ordering};
+use atomic::{AtomicPtr, AtomicUsize, Ordering};
+use core::cell::UnsafeCell;
+use core::marker::PhantomData;
 use core::num::NonZeroUsize;
+use core::ptr;
 
 /// A thread-safe cell which can be written to only once.
 #[derive(Default, Debug)]
@@ -152,12 +168,125 @@
     fn from_usize(value: NonZeroUsize) -> bool {
         value.get() == 1
     }
+
     #[inline]
     fn to_usize(value: bool) -> NonZeroUsize {
         unsafe { NonZeroUsize::new_unchecked(if value { 1 } else { 2 }) }
     }
 }
 
+/// A thread-safe cell which can be written to only once.
+pub struct OnceRef<'a, T> {
+    inner: AtomicPtr<T>,
+    ghost: PhantomData<UnsafeCell<&'a T>>,
+}
+
+// TODO: Replace UnsafeCell with SyncUnsafeCell once stabilized
+unsafe impl<'a, T: Sync> Sync for OnceRef<'a, T> {}
+
+impl<'a, T> core::fmt::Debug for OnceRef<'a, T> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "OnceRef({:?})", self.inner)
+    }
+}
+
+impl<'a, T> Default for OnceRef<'a, T> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl<'a, T> OnceRef<'a, T> {
+    /// Creates a new empty cell.
+    pub const fn new() -> OnceRef<'a, T> {
+        OnceRef { inner: AtomicPtr::new(ptr::null_mut()), ghost: PhantomData }
+    }
+
+    /// Gets a reference to the underlying value.
+    pub fn get(&self) -> Option<&'a T> {
+        let ptr = self.inner.load(Ordering::Acquire);
+        unsafe { ptr.as_ref() }
+    }
+
+    /// Sets the contents of this cell to `value`.
+    ///
+    /// Returns `Ok(())` if the cell was empty and `Err(value)` if it was
+    /// full.
+    pub fn set(&self, value: &'a T) -> Result<(), ()> {
+        let ptr = value as *const T as *mut T;
+        let exchange =
+            self.inner.compare_exchange(ptr::null_mut(), ptr, Ordering::AcqRel, Ordering::Acquire);
+        match exchange {
+            Ok(_) => Ok(()),
+            Err(_) => Err(()),
+        }
+    }
+
+    /// Gets the contents of the cell, initializing it with `f` if the cell was
+    /// empty.
+    ///
+    /// If several threads concurrently run `get_or_init`, more than one `f` can
+    /// be called. However, all threads will return the same value, produced by
+    /// some `f`.
+    pub fn get_or_init<F>(&self, f: F) -> &'a T
+    where
+        F: FnOnce() -> &'a T,
+    {
+        enum Void {}
+        match self.get_or_try_init(|| Ok::<&'a T, Void>(f())) {
+            Ok(val) => val,
+            Err(void) => match void {},
+        }
+    }
+
+    /// Gets the contents of the cell, initializing it with `f` if
+    /// the cell was empty. If the cell was empty and `f` failed, an
+    /// error is returned.
+    ///
+    /// If several threads concurrently run `get_or_init`, more than one `f` can
+    /// be called. However, all threads will return the same value, produced by
+    /// some `f`.
+    pub fn get_or_try_init<F, E>(&self, f: F) -> Result<&'a T, E>
+    where
+        F: FnOnce() -> Result<&'a T, E>,
+    {
+        let mut ptr = self.inner.load(Ordering::Acquire);
+
+        if ptr.is_null() {
+            // TODO replace with `cast_mut` when MSRV reaches 1.65.0 (also in `set`)
+            ptr = f()? as *const T as *mut T;
+            let exchange = self.inner.compare_exchange(
+                ptr::null_mut(),
+                ptr,
+                Ordering::AcqRel,
+                Ordering::Acquire,
+            );
+            if let Err(old) = exchange {
+                ptr = old;
+            }
+        }
+
+        Ok(unsafe { &*ptr })
+    }
+
+    /// ```compile_fail
+    /// use once_cell::race::OnceRef;
+    ///
+    /// let mut l = OnceRef::new();
+    ///
+    /// {
+    ///     let y = 2;
+    ///     let mut r = OnceRef::new();
+    ///     r.set(&y).unwrap();
+    ///     core::mem::swap(&mut l, &mut r);
+    /// }
+    ///
+    /// // l now contains a dangling reference to y
+    /// eprintln!("uaf: {}", l.get().unwrap());
+    /// ```
+    fn _dummy() {}
+}
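A minimal usage sketch of the new `OnceRef` type (illustrative statics, not from the upstream docs): the cell stores a shared reference at most once, and later writers simply see the reference that won.

```rust
use once_cell::race::OnceRef;

static FIRST: u32 = 92;
static SECOND: u32 = 62;

fn main() {
    let cell: OnceRef<'static, u32> = OnceRef::new();
    assert!(cell.get().is_none());

    // The first `set` wins; later attempts are rejected.
    cell.set(&FIRST).unwrap();
    assert!(cell.set(&SECOND).is_err());
    assert_eq!(cell.get(), Some(&FIRST));

    // The cell is already full, so the closure is never called and the
    // stored reference is returned.
    let value: &u32 = cell.get_or_init(|| &SECOND);
    assert_eq!(*value, 92);
}
```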
+
 #[cfg(feature = "alloc")]
 pub use self::once_box::OnceBox;
 
diff --git a/tests/it.rs b/tests/it.rs
index c769487..ec35e4b 100644
--- a/tests/it.rs
+++ b/tests/it.rs
@@ -18,6 +18,13 @@
     }
 
     #[test]
+    fn once_cell_with_value() {
+        const CELL: OnceCell<i32> = OnceCell::with_value(12);
+        let cell = CELL;
+        assert_eq!(cell.get(), Some(&12));
+    }
+
+    #[test]
     fn once_cell_get_mut() {
         let mut c = OnceCell::new();
         assert!(c.get_mut().is_none());
@@ -131,6 +138,41 @@
     }
 
     #[test]
+    fn lazy_force_mut() {
+        let called = Cell::new(0);
+        let mut x = Lazy::new(|| {
+            called.set(called.get() + 1);
+            92
+        });
+        assert_eq!(called.get(), 0);
+        let v = Lazy::force_mut(&mut x);
+        assert_eq!(called.get(), 1);
+
+        *v /= 2;
+        assert_eq!(*x, 46);
+        assert_eq!(called.get(), 1);
+    }
+
+    #[test]
+    fn lazy_get_mut() {
+        let called = Cell::new(0);
+        let mut x: Lazy<u32, _> = Lazy::new(|| {
+            called.set(called.get() + 1);
+            92
+        });
+
+        assert_eq!(called.get(), 0);
+        assert_eq!(*x, 92);
+
+        let mut_ref: &mut u32 = Lazy::get_mut(&mut x).unwrap();
+        assert_eq!(called.get(), 1);
+
+        *mut_ref /= 2;
+        assert_eq!(*x, 46);
+        assert_eq!(called.get(), 1);
+    }
+
+    #[test]
     fn lazy_default() {
         static CALLED: AtomicUsize = AtomicUsize::new(0);
 
@@ -186,6 +228,7 @@
 
     #[test]
     #[should_panic(expected = "reentrant init")]
+    #[ignore = "Android: ignore for now. Need to compile these binaries separately."]
     fn reentrant_init() {
         let x: OnceCell<Box<i32>> = OnceCell::new();
         let dangling_ref: Cell<Option<&i32>> = Cell::new(None);
@@ -208,10 +251,16 @@
     }
 }
 
-#[cfg(feature = "std")]
+#[cfg(any(feature = "std", feature = "critical-section"))]
 mod sync {
     use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
 
+    #[cfg(feature = "std")]
+    use std::sync::Barrier;
+
+    #[cfg(not(feature = "std"))]
+    use core::cell::Cell;
+
     use crossbeam_utils::thread::scope;
 
     use once_cell::sync::{Lazy, OnceCell};
@@ -232,6 +281,12 @@
     }
 
     #[test]
+    fn once_cell_with_value() {
+        static CELL: OnceCell<i32> = OnceCell::with_value(12);
+        assert_eq!(CELL.get(), Some(&12));
+    }
+
+    #[test]
     fn once_cell_get_mut() {
         let mut c = OnceCell::new();
         assert!(c.get_mut().is_none());
@@ -308,6 +363,41 @@
         assert_eq!(cell.get(), Some(&"hello".to_string()));
     }
 
+    #[cfg(feature = "std")]
+    #[test]
+    fn wait() {
+        let cell: OnceCell<String> = OnceCell::new();
+        scope(|s| {
+            s.spawn(|_| cell.set("hello".to_string()));
+            let greeting = cell.wait();
+            assert_eq!(greeting, "hello")
+        })
+        .unwrap();
+    }
+
+    #[cfg(feature = "std")]
+    #[test]
+    fn get_or_init_stress() {
+        let n_threads = if cfg!(miri) { 30 } else { 1_000 };
+        let n_cells = if cfg!(miri) { 30 } else { 1_000 };
+        let cells: Vec<_> = std::iter::repeat_with(|| (Barrier::new(n_threads), OnceCell::new()))
+            .take(n_cells)
+            .collect();
+        scope(|s| {
+            for t in 0..n_threads {
+                let cells = &cells;
+                s.spawn(move |_| {
+                    for (i, (b, s)) in cells.iter().enumerate() {
+                        b.wait();
+                        let j = if t % 2 == 0 { s.wait() } else { s.get_or_init(|| i) };
+                        assert_eq!(*j, i);
+                    }
+                });
+            }
+        })
+        .unwrap();
+    }
+
     #[test]
     fn from_impl() {
         assert_eq!(OnceCell::from("value").get(), Some(&"value"));
@@ -350,6 +440,7 @@
 
     #[test]
     #[cfg_attr(miri, ignore)] // miri doesn't support processes
+    #[cfg(feature = "std")]
     #[ignore = "Android: ignore for now. Need to compile these binaries separately."]
     fn reentrant_init() {
         let examples_dir = {
@@ -378,6 +469,20 @@
         }
     }
 
+    #[cfg(not(feature = "std"))]
+    #[test]
+    #[should_panic(expected = "reentrant init")]
+    fn reentrant_init() {
+        let x: OnceCell<Box<i32>> = OnceCell::new();
+        let dangling_ref: Cell<Option<&i32>> = Cell::new(None);
+        x.get_or_init(|| {
+            let r = x.get_or_init(|| Box::new(92));
+            dangling_ref.set(Some(r));
+            Box::new(62)
+        });
+        eprintln!("use after free: {:?}", dangling_ref.get().unwrap());
+    }
+
     #[test]
     fn lazy_new() {
         let called = AtomicUsize::new(0);
@@ -533,9 +638,8 @@
     }
 
     #[test]
-    #[cfg_attr(miri, ignore)] // FIXME: deadlocks, likely caused by https://github.com/rust-lang/miri/issues/1388
     fn once_cell_does_not_leak_partially_constructed_boxes() {
-        let n_tries = 100;
+        let n_tries = if cfg!(miri) { 10 } else { 100 };
         let n_readers = 10;
         let n_writers = 3;
         const MSG: &str = "Hello, World";
@@ -559,11 +663,9 @@
         }
     }
 
+    #[cfg(feature = "std")]
     #[test]
-    #[cfg_attr(miri, ignore)] // miri doesn't support Barrier
     fn get_does_not_block() {
-        use std::sync::Barrier;
-
         let cell = OnceCell::new();
         let barrier = Barrier::new(2);
         scope(|scope| {
@@ -595,12 +697,11 @@
 
 #[cfg(feature = "race")]
 mod race {
+    #[cfg(feature = "std")]
+    use std::sync::Barrier;
     use std::{
         num::NonZeroUsize,
-        sync::{
-            atomic::{AtomicUsize, Ordering::SeqCst},
-            Barrier,
-        },
+        sync::atomic::{AtomicUsize, Ordering::SeqCst},
     };
 
     use crossbeam_utils::thread::scope;
@@ -652,6 +753,7 @@
         assert_eq!(cell.get(), Some(val1));
     }
 
+    #[cfg(feature = "std")]
     #[test]
     fn once_non_zero_usize_first_wins() {
         let val1 = NonZeroUsize::new(92).unwrap();
@@ -731,12 +833,16 @@
 
 #[cfg(all(feature = "race", feature = "alloc"))]
 mod race_once_box {
+    #[cfg(feature = "std")]
+    use std::sync::Barrier;
     use std::sync::{
         atomic::{AtomicUsize, Ordering::SeqCst},
-        Arc, Barrier,
+        Arc,
     };
 
+    #[cfg(feature = "std")]
     use crossbeam_utils::thread::scope;
+
     use once_cell::race::OnceBox;
 
     #[derive(Default)]
@@ -766,6 +872,7 @@
         }
     }
 
+    #[cfg(feature = "std")]
     #[test]
     fn once_box_smoke_test() {
         let heap = Heap::default();
@@ -820,6 +927,7 @@
         assert_eq!(heap.total(), 0);
     }
 
+    #[cfg(feature = "std")]
     #[test]
     fn once_box_first_wins() {
         let cell = OnceBox::new();