Vendor dependencies for 0.3.0 release

2025-09-27 10:29:08 -05:00
parent 0c8d39d483
commit 82ab7f317b
26803 changed files with 16134934 additions and 0 deletions

1
vendor/immutable-chunkmap/.cargo-checksum.json

@@ -0,0 +1 @@
{"files":{"Cargo.lock":"c3973c2b160cd1da89d52527ad8bbee126c3960f989e26e9d09130f69fc61d37","Cargo.toml":"d16e28d17ce0cb21b73661d025424a3e00e21ddc6c231b9739e574239d0acb6b","Changelog":"3b14c74ea3f02d0e242b5b6b8b0d0b475c318270392ac9a6d289af6a619f51f7","LICENSE-APACHE":"7277e044486d82039c8855cf1611f57e64b7980213ba3589db49daf09dbdfd3e","LICENSE-MIT":"267f4a928bb3cf6ad37252621eb1199d2cb58560046b7dea4ec3f7a4cc9261a7","README.md":"bab576b972f18be9c83cad6e0af7145802a65aed139f6f7333deebd74ba3ac30","bench-ocaml/build.sh":"5e283a96a8b5c0f3f61f1e9de2a88a935b5790a3aeb4949956af29e0f21af32f","bench-ocaml/test.ml":"1063b1a1c756d99f35501d0fa012923a1011ffe4a128c8c777685f5a6fac49c3","rustfmt.toml":"62097c3a7a8c8a3c2829a86af8c03607d2d11aea48fca76b8c5a62d19278be36","src/avl.rs":"dcbb43bd47ee62ad05abe444017553a6f69f3266bf77aa4d53678b836c55d416","src/chunk.rs":"da5f4c4ecf4a7c207f9c7f886f9ff76d1de43327223b12f64e95a880938ccaa9","src/lib.rs":"1e4cc575e3f644855d2b46fdb715c5857749da1086c53056a1f133f6597bf726","src/map.rs":"e8563f41ee20d7fc0cef6e74af12a607462d366c6929eb1a56bd1cce18b2c666","src/set.rs":"39f388814c3f0dfe32fd8fa3a09ce2e45bc1e0a9255dca4a004ca6e860d4580d","src/tests.rs":"1d5302a1dcd5d19c63dcf95ed421e2a7c157586cd02eb6ee9f62d78f668ef3ba"},"package":"9a3e98b1520e49e252237edc238a39869da9f3241f2ec19dc788c1d24694d1e4"}

1472
vendor/immutable-chunkmap/Cargo.lock generated vendored Normal file

File diff suppressed because it is too large

101
vendor/immutable-chunkmap/Cargo.toml vendored Normal file

@@ -0,0 +1,101 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "immutable-chunkmap"
version = "2.1.2"
authors = ["Eric Stokes <letaris@gmail.com>"]
build = false
publish = true
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "A fast immutable map and set with batch insert and update methods, COW operations, and big O efficient implementations of set and merge operations"
documentation = "https://docs.rs/immutable-chunkmap"
readme = "README.md"
keywords = [
"map",
"set",
"immutable",
"persistent",
"functional",
]
categories = [
"data-structures",
"no-std",
]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/estokes/immutable-chunkmap"
[features]
default = []
pool = [
"dep:poolshark",
"dep:fxhash",
]
rayon = ["dep:rayon"]
serde = ["dep:serde"]
[lib]
name = "immutable_chunkmap"
path = "src/lib.rs"
[dependencies.arrayvec]
version = "0.7"
default-features = false
[dependencies.fxhash]
version = "0.2"
optional = true
[dependencies.poolshark]
version = "0.2.0"
optional = true
[dependencies.rayon]
version = "1"
optional = true
[dependencies.serde]
version = "1"
features = ["alloc"]
optional = true
default-features = false
[dev-dependencies.arcstr]
version = "1"
[dev-dependencies.compact_str]
version = "0.9"
[dev-dependencies.hashbrown]
version = "0.15"
[dev-dependencies.netidx-value]
version = "0.29"
[dev-dependencies.paste]
version = "1"
[dev-dependencies.rand]
version = "0.8"
[dev-dependencies.rayon]
version = "1"
[dev-dependencies.serde]
version = "1"
[dev-dependencies.serde_json]
version = "1"

189
vendor/immutable-chunkmap/Changelog vendored Normal file

@@ -0,0 +1,189 @@
2.1.2
* fix unsoundness when pooling tree nodes; pooling is now re-enabled
2.1.1
* stop pooling tree nodes; because of notch analysis they can never meet the
safety requirements of IsoPoolable.
2.1.0
* add an optional feature "pool". If enabled, memory allocation is greatly
reduced by reusing tree components instead of freeing them. The performance
improvement depends on key type and tree size but can be as high as 30% for
plain insert/remove on very large maps. Memory fragmentation will also be
greatly reduced, but this is much harder to measure.
* pool is not compatible with no_std
* fix a number of test bugs (actual bugs in the TEST code, not the tree) that
cropped up when running model checking for multiple days continuously
2.0.6
* merge #10, no_std support (alloc required)
2.0.5
* merge #8 hand coded packing of height/size to remove the dependency
on packed struct
* fix a few bugs in the model checking COW tests
2.0.4
* remove redundant type parameter from iter_mut_cow
2.0.3
* implement iter_mut_cow, range_mut_cow, for copy on write iterators
2.0.2
* implement get_or_insert_cow and get_or_default_cow
2.0.1
* implement get_mut_cow to improve the ergonomics of using nested
maps with COW operations
2.0.0
* fix the range api to match the standard library. Sorry about the
previous two releases, it's actually quite a subtle api and I
consistently got it wrong. I also got semantic versioning wrong, and
I'm really sorry about that, I hope nothing broke annoyingly for
anyone (aside from for me :))
1.1.1
* fix ?Sized on range
1.1.0
* fix the arguments to range to have the correct borrow type
1.0.5
* add remove_many, a small wrapper around update_many
* fix a bug in update_many where trying to remove elements that don't
exist in the map could cause a panic. Add a test for this case.
1.0.4
* add optional rayon support for map and set
1.0.3
* add optional serde support for map and set
1.0.2
* stop depending on serde by default
1.0.1
* replace vec chunks with arrayvec chunks to eliminate an indirection
* implement tree compaction to keep inner chunks from getting too sparse
1.0.0 "the cake is not a lie"
* Add copy on write mutable operations. They are 10x faster than plain
insert/remove, and bring update performance within 2-3x of
BTreeMap (with ArcStr keys).
* Chunk size is now configurable with a const generic parameter. Three
different sizes are exposed as default type aliases.
* bump dependencies, update readme, and refresh the benchmarks
* feels like 1.0 to me!
0.5.9
* add weak references to maps and sets as well as methods to get the
strong and weak count of map/set references.
0.5.8
* further 20% performance improvement on batch update operations.
0.5.7
* improve performance of batch update operations on unsorted data by a
large amount (7x on trees of size 10 million and chunks of size 100k).
0.5.6
* packed the height and length together, which reduced the size of
nodes by 1 word. That reduction gets us a 1-2% improvement in
lookup times (more benefit for larger trees). Tree length is now
limited to 2^56 elements instead of usize::MAX elements.
* made the tests run faster by wrapping strings in Arcs, which is
closer to how you'd use them in a real program anyway.
0.5.5
* small performance optimizations to update operations
0.5.4
* Edition 2018
0.5.3
* implement Set::diff, and Map::diff, O(log(N) + M) where M is the
number of intersecting chunks. Now all the fundamental set
operations are implemented.
* rename Map::merge to Map::union. Sorry for the tiny break in
semantic versioning, but given it was just released I don't
think it's a huge problem to change it now.
0.5.2
* implement Set::intersect, and Map::intersect, O(log(N) + M)
where M is the number of intersecting chunks.
0.5.1
* implement Set::union, and Map::merge, O(log(N) + M) where M is the
number of intersecting chunks and N is the size of the largest
tree. Should always be as fast as update_many from the other map's
iterator, a lot faster in the case of a small intersection.
* remove my silly &mut F requirement for closure arguments to
functions in the public interface. Sorry I'm still learning rust, I
didn't know FnMut was also implemented by a &mut.
0.5.0
* implement map get_key, get_full
* implement set update_many
* fix some incorrect documentation
* BREAKING change map and set update functions so they are able to
work with borrowed forms of the key
0.4.1
* implement get in the set module
* properly implement Ord, PartialOrd, Eq, PartialEq, Hash, and Debug
0.4.0
* add a set module
* remove the rc and arc modules. There is no observable performance
difference between rc and arc, so just use arc everywhere. This is
especially relevant because all the practical applications of this
library that I know about require using Arc.
* BREAKING: fix return type of insert and remove to match BTreeMap
0.3.2
* fix a small performance regression in update_many caused by my last
change
0.3.1
* fix a bug in update_many that could rarely cause a removed item not
to be removed
0.3.0 (yanked)
* BREAKING: change the name of insert_sorted to insert_many
* add update, and update_many
* insert 14% performance improvement
* insert_many 42% performance improvement on unsorted data, now faster
than insert on random data
0.2.1
* BREAKING: change signature of insert to match BTreeMap as closely as
possible. Sorry I was new to rust when I first wrote this module :-(
0.2.0
* iteration runs in constant space
* Implement collection range api
* Implement DoubleEndedIterator
* insert_sorted performance improved on degenerate cases
* BREAKING: insert_sorted now takes IntoIterator instead of an explicit slice
* BREAKING: change the name of length to len, like BTreeMap
0.1.2
* Initial public release

201
vendor/immutable-chunkmap/LICENSE-APACHE vendored Normal file

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2022 Eric Stokes
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

7
vendor/immutable-chunkmap/LICENSE-MIT vendored Normal file

@@ -0,0 +1,7 @@
Copyright 2022 Eric Stokes
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

44
vendor/immutable-chunkmap/README.md vendored Normal file

@@ -0,0 +1,44 @@
# immutable chunk map
A cache-efficient immutable map with lookup performance close to
BTreeMap and reasonably good insertion performance. Optional copy-on-write
mutable operations bring modification performance within 2x of
BTreeMap in the best case, while still offering snapshotting and the
big-O efficient set operations of a persistent data structure.
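
For orientation, a small usage sketch (the `MapM` alias and the `insert`, `insert_cow`, and `get` methods used here are defined in `src/map.rs`):

```rust
use immutable_chunkmap::map::MapM;

fn main() {
    // persistent insert: returns a new map plus the previous binding, if any
    let m = MapM::new().insert("a", 1).0.insert("b", 2).0;
    // snapshotting is free: clones share structure with the original
    let snapshot = m.clone();
    // COW insert: mutates in place when the reference is unique, otherwise
    // copies only the parts of the map that need to change
    let mut m = m;
    m.insert_cow("c", 3);
    assert_eq!(m.get("c"), Some(&3));
    assert_eq!(snapshot.get("c"), None);
}
```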
Below is a graph of lookup performance for various data structures using usize
keys. Full test data is in the bench/charts directory. Tests were performed on
an Intel Core i7-8550U under Linux with the frequency locked at 1.8 GHz.
* OCaml: core map (from the Jane Street core library), an AVL tree
with distinct leaf nodes and a relaxed balance constraint.
* Chunkmap: this library
* Chunkmap COW: this library using only COW operations
* BTreeMap: from the Rust standard library
* HashMap: from the Rust standard library
![alt text](bench/charts/intel_corei7-8550U_arrayvec_compact/usize_get.png "average lookup time")
Chunkmap is very close to BTreeMap for random accesses using keys
without hashing. Obviously, if you don't need ordered data, use a
HashMap.
![alt text](bench/charts/intel_corei7-8550U_arrayvec_compact/usize_insert.png "average insert time")
Insertion performance, while not as good as that of most mutable data
structures, is not awful when using COW mode exclusively. When you have
many updates to do at once, you can go even faster by using
insert_many (a short sketch follows below). In some cases, e.g. building a
map from scratch from sorted inputs, this can be faster than even a HashMap.
The case below is more typical: adding 10% of a data set to the map.
![alt text](bench/charts/intel_corei7-8550U_arrayvec_compact/usize_insert_many.png "insert many")
A note about the COW bar on this graph: it represents using only
mutable COW operations on the map. It is perfectly possible to use an
actual insert_many call instead of mutable COW operations if that is
faster in your application, which, as you can see, depends on the size
of the map.
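
As a sketch of batch insertion (insert_many accepts any iterator of key/value pairs; sorted input is the fast path discussed above):

```rust
use immutable_chunkmap::map::MapM;

fn main() {
    // build a map from already sorted input in a single batch
    let m: MapM<u64, u64> = MapM::new().insert_many((0..1_000u64).map(|k| (k, k * 2)));
    assert_eq!(m.len(), 1000);
    assert_eq!(m.get(&500), Some(&1000));
    // add another sorted batch; the original map is unchanged
    let m2 = m.insert_many((1_000..1_100u64).map(|k| (k, k * 2)));
    assert_eq!(m2.len(), 1100);
    assert_eq!(m.len(), 1000);
}
```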
# License
This project is dual-licensed under either the MIT license or the Apache License 2.0, at your discretion.

3
vendor/immutable-chunkmap/bench-ocaml/build.sh

@@ -0,0 +1,3 @@
#! /bin/bash
ocamlfind ocamlopt -package core -linkpkg -thread -o test test.ml

88
vendor/immutable-chunkmap/bench-ocaml/test.ml

@@ -0,0 +1,88 @@
open Core
let min_iter = 1_000_000
let random_string () =
let size = 32 in
let s = Bytes.create size in
for i = 0 to size - 1 do
let c = Option.value_exn (Char.of_int (Random.int 254)) in
Bytes.set s i c;
done;
Bytes.to_string s
let random_array size mk k =
let s = Hash_set.create ~size k in
while Hash_set.length s < size do
Hash_set.add s (mk ())
done;
Hash_set.to_array s
let bench_add cmp k v =
let kv = Option.value_exn (Array.zip k v) in
let st = Time_float.now () in
let m =
Array.fold kv ~init:(Map.empty cmp)
~f:(fun m (k, v) -> Map.set m ~key:k ~data:v)
in
let en = Time_float.now () in
(m, Time_float.diff en st)
let bench_find m k =
let st = Time_float.now () in
let i = ref 0 in
let len = Array.length k in
let iter = Int.max len min_iter in
while !i < iter do
assert (Option.is_some (Map.find m (Array.unsafe_get k (!i mod len))));
incr i
done;
let en = Time_float.now () in
Time_float.diff en st
let bench_remove m k =
let st = Time_float.now () in
let m = Array.fold k ~init:m ~f:(fun m k -> Map.remove m k) in
if Map.length m <> 0 then failwith "remove is broken";
let en = Time_float.now () in
Time_float.diff en st
let () =
let args = Sys.get_argv () in
let size =
if Array.length args = 4 then Int.of_string args.(3)
else begin
printf "usage: test <unused> <kind> <size>\n%!";
exit 0
end
in
let str t sz = sprintf "%g" (Time_float.Span.to_ns t /. float sz) in
match args.(2) with
"ptr" -> begin
let mk () = Random.int Int.max_value in
let k = random_array size mk (module Int) in
let ks = random_array size mk (module Int) in
let v = random_array size mk (module Int) in
Array.sort ~compare:Int.compare ks;
let (m, add) = bench_add (module Int) k v in
let (_, adds) = bench_add (module Int) ks v in
let find = bench_find m k in
let rm = bench_remove m k in
printf "%d,%s,%s,%s,%s,%s,%s\n%!"
size (str add size) (str adds size) "0."
(str find (Int.max min_iter size)) "0" (str rm size)
end
| "str" -> begin
let k = random_array size random_string (module String) in
let ks = random_array size random_string (module String) in
let v = random_array size random_string (module String) in
Array.sort ~compare:String.compare ks;
let (m, add) = bench_add (module String) k v in
let (_, adds) = bench_add (module String) ks v in
let find = bench_find m k in
let rm = bench_remove m k in
printf "%d,%s,%s,%s,%s,%s,%s\n%!"
size (str add size) (str adds size) "0."
(str find (Int.max min_iter size)) "0" (str rm size)
end
| _ -> failwith "invalid kind. Allowed kinds: [ptr, str]"

60
vendor/immutable-chunkmap/rustfmt.toml vendored Normal file

@@ -0,0 +1,60 @@
max_width = 90
hard_tabs = false
tab_spaces = 4
newline_style = "Auto"
use_small_heuristics = "Default"
indent_style = "Block"
wrap_comments = false
comment_width = 80
normalize_comments = false
license_template_path = ""
format_strings = false
format_macro_matchers = false
format_macro_bodies = true
empty_item_single_line = true
struct_lit_single_line = true
fn_single_line = false
where_single_line = false
imports_indent = "Block"
imports_layout = "Mixed"
merge_imports = false
reorder_imports = true
reorder_modules = true
reorder_impl_items = false
type_punctuation_density = "Wide"
space_before_colon = false
space_after_colon = true
spaces_around_ranges = false
binop_separator = "Front"
remove_nested_parens = true
combine_control_expr = true
struct_field_align_threshold = 0
match_arm_blocks = true
force_multiline_blocks = false
fn_args_density = "Tall"
brace_style = "SameLineWhere"
control_brace_style = "AlwaysSameLine"
trailing_semicolon = true
trailing_comma = "Vertical"
match_block_trailing_comma = false
blank_lines_upper_bound = 1
blank_lines_lower_bound = 0
edition = "2018"
merge_derives = true
use_try_shorthand = false
use_field_init_shorthand = false
force_explicit_abi = true
condense_wildcard_suffixes = false
color = "Auto"
required_version = "0.99.4"
unstable_features = false
disable_all_formatting = false
skip_children = false
hide_parse_errors = false
error_on_line_overflow = false
error_on_unformatted = false
report_todo = "Never"
report_fixme = "Never"
ignore = []
emit_mode = "Files"
make_backup = false

1836
vendor/immutable-chunkmap/src/avl.rs vendored Normal file

File diff suppressed because it is too large

826
vendor/immutable-chunkmap/src/chunk.rs vendored Normal file

@@ -0,0 +1,826 @@
use alloc::{sync::Arc, vec::Vec};
use arrayvec::ArrayVec;
use core::{
borrow::Borrow,
cmp::{min, Ord, Ordering},
fmt::{self, Debug, Formatter},
iter, mem,
ops::Deref,
slice,
};
#[cfg(feature = "pool")]
use core::{mem::ManuallyDrop, ptr};
#[cfg(feature = "pool")]
use poolshark::{
local::{insert_raw, take},
location_id, Discriminant, IsoPoolable, Poolable,
};
#[derive(PartialEq)]
pub(crate) enum Loc {
InRight,
InLeft,
NotPresent(usize),
Here(usize),
}
/*
elts is a sorted array of pairs. Increasing SIZE has several effects:
-- decreases the height of the tree for a given number of elements,
decreasing the amount of indirection necessary to get to any given
key.
-- decreases the number of objects allocated on the heap each time a
key is added or removed
-- increases the size of each allocation
-- increases the overall amount of memory allocated for each change to
the tree
*/
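// Note: the public type aliases in map.rs (MapS, MapM, MapL) instantiate the
// tree with DEFAULT_SIZE / 2, DEFAULT_SIZE, and DEFAULT_SIZE * 2 respectively,
// so users pick this trade off by choosing an alias rather than a raw SIZE.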
pub const DEFAULT_SIZE: usize = 512;
pub(crate) enum UpdateChunk<
Q: Ord,
K: Ord + Clone + Borrow<Q>,
V: Clone,
D,
const SIZE: usize,
> {
UpdateLeft(Vec<(Q, D)>),
UpdateRight(Vec<(Q, D)>),
Updated {
elts: Chunk<K, V, SIZE>,
update_left: Vec<(Q, D)>,
update_right: Vec<(Q, D)>,
overflow_right: Vec<(K, V)>,
},
Removed {
not_done: Vec<(Q, D)>,
update_left: Vec<(Q, D)>,
update_right: Vec<(Q, D)>,
},
}
pub(crate) enum Update<Q: Ord, K: Ord + Clone + Borrow<Q>, V: Clone, D, const SIZE: usize>
{
UpdateLeft(Q, D),
UpdateRight(Q, D),
Updated {
elts: Chunk<K, V, SIZE>,
overflow: Option<(K, V)>,
previous: Option<V>,
},
}
pub(crate) enum MutUpdate<Q: Ord, K: Ord + Clone + Borrow<Q>, V: Clone, D> {
UpdateLeft(Q, D),
UpdateRight(Q, D),
Updated {
overflow: Option<(K, V)>,
previous: Option<V>,
},
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct ChunkInner<K, V, const SIZE: usize> {
keys: ArrayVec<K, SIZE>,
vals: ArrayVec<V, SIZE>,
}
#[cfg(feature = "pool")]
impl<K, V, const SIZE: usize> ChunkInner<K, V, SIZE> {
fn new() -> Self {
Self {
keys: ArrayVec::new(),
vals: ArrayVec::new(),
}
}
fn reset(&mut self) {
self.keys.clear();
self.vals.clear();
}
}
#[cfg(feature = "pool")]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct Chunk<K: Ord + Clone, V: Clone, const SIZE: usize>(
ManuallyDrop<Arc<ChunkInner<K, V, SIZE>>>,
);
#[cfg(feature = "pool")]
impl<K: Ord + Clone, V: Clone, const SIZE: usize> Poolable for Chunk<K, V, SIZE> {
fn capacity(&self) -> usize {
1
}
fn empty() -> Self {
Self(ManuallyDrop::new(Arc::new(ChunkInner::new())))
}
fn really_dropped(&mut self) -> bool {
Arc::get_mut(&mut self.0).is_some()
}
fn reset(&mut self) {
Arc::get_mut(&mut self.0).unwrap().reset()
}
}
#[cfg(feature = "pool")]
unsafe impl<K: Ord + Clone, V: Clone, const SIZE: usize> IsoPoolable
for Chunk<K, V, SIZE>
{
const DISCRIMINANT: Option<Discriminant> =
Discriminant::new_p2_size::<K, V, SIZE>(location_id!());
}
#[cfg(feature = "pool")]
impl<K: Ord + Clone, V: Clone, const SIZE: usize> Drop for Chunk<K, V, SIZE> {
fn drop(&mut self) {
match Arc::get_mut(&mut self.0) {
// the inner Arc is still shared (or weakly referenced): just drop our handle
None => unsafe {
ManuallyDrop::drop(&mut self.0);
},
// uniquely owned: clear the chunk and offer it to the pool for reuse
Some(inner) => {
inner.reset();
// anything insert_raw hands back must have its inner Arc dropped manually,
// then be forgotten so this Drop impl doesn't run on it a second time
if let Some(mut t) = unsafe { insert_raw::<Self>(ptr::read(self)) } {
unsafe { ManuallyDrop::drop(&mut t.0) };
mem::forget(t); // don't call ourselves recursively
}
}
}
}
}
#[cfg(not(feature = "pool"))]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct Chunk<K: Ord + Clone, V: Clone, const SIZE: usize>(
Arc<ChunkInner<K, V, SIZE>>,
);
impl<K: Ord + Clone, V: Clone, const SIZE: usize> Deref for Chunk<K, V, SIZE> {
type Target = ChunkInner<K, V, SIZE>;
fn deref(&self) -> &Self::Target {
&*self.0
}
}
impl<K: Ord + Clone, V: Clone, const SIZE: usize> Debug for Chunk<K, V, SIZE>
where
K: Debug,
V: Debug,
{
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_map().entries(self.into_iter()).finish()
}
}
impl<K: Ord + Clone, V: Clone, const SIZE: usize> Chunk<K, V, SIZE>
where
K: Ord + Clone,
V: Clone,
{
pub(crate) fn singleton(k: K, v: V) -> Self {
let mut t = Self::empty();
let inner = Arc::get_mut(&mut t.0).unwrap();
inner.keys.push(k);
inner.vals.push(v);
t
}
#[cfg(feature = "pool")]
fn make_mut<'a>(&'a mut self) -> &'a mut ChunkInner<K, V, SIZE> {
match Arc::get_mut(&mut *self.0).map(|i| i as *mut _) {
Some(i) => unsafe { &mut *i },
None => {
let mut ni = Self::empty();
*Arc::get_mut(&mut *ni.0).unwrap() = (**self.0).clone();
*self = ni;
Arc::get_mut(&mut *self.0).unwrap()
}
}
}
#[cfg(not(feature = "pool"))]
fn make_mut<'a>(&'a mut self) -> &'a mut ChunkInner<K, V, SIZE> {
Arc::make_mut(&mut self.0)
}
#[cfg(feature = "pool")]
pub(crate) fn empty() -> Self {
take()
}
#[cfg(not(feature = "pool"))]
pub(crate) fn empty() -> Self {
Self(Arc::new(ChunkInner {
keys: ArrayVec::new(),
vals: ArrayVec::new(),
}))
}
pub(crate) fn create_with<Q, D, F>(chunk: Vec<(Q, D)>, f: &mut F) -> Self
where
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
let mut t = Chunk::empty();
let inner = Arc::get_mut(&mut t.0).unwrap();
for (k, v) in chunk.into_iter().filter_map(|(q, d)| f(q, d, None)) {
inner.keys.push(k);
inner.vals.push(v);
}
t
}
pub(crate) fn get_local<Q: ?Sized + Ord>(&self, k: &Q) -> Option<usize>
where
K: Borrow<Q>,
{
match self.keys.binary_search_by_key(&k, |k| k.borrow()) {
Ok(i) => Some(i),
Err(_) => None,
}
}
pub(crate) fn get<Q: ?Sized + Ord>(&self, k: &Q) -> Loc
where
K: Borrow<Q>,
{
let len = self.len();
if len == 0 {
Loc::NotPresent(0)
} else {
let first = k.cmp(&self.keys[0].borrow());
let last = k.cmp(&self.keys[len - 1].borrow());
match (first, last) {
(Ordering::Equal, _) => Loc::Here(0),
(_, Ordering::Equal) => Loc::Here(len - 1),
(Ordering::Less, _) => Loc::InLeft,
(_, Ordering::Greater) => Loc::InRight,
(Ordering::Greater, Ordering::Less) => {
match self.keys.binary_search_by_key(&k, |k| k.borrow()) {
Result::Ok(i) => Loc::Here(i),
Result::Err(i) => Loc::NotPresent(i),
}
}
}
}
}
// invariant: chunk is sorted and deduped
pub(crate) fn update_chunk<Q, D, F>(
&self,
chunk: Vec<(Q, D)>,
leaf: bool,
f: &mut F,
) -> UpdateChunk<Q, K, V, D, SIZE>
where
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
assert!(chunk.len() <= SIZE && chunk.len() > 0 && self.len() > 0);
let full = !leaf || self.len() >= SIZE;
let in_left = self.get(&chunk[chunk.len() - 1].0) == Loc::InLeft;
let in_right = self.get(&chunk[0].0) == Loc::InRight;
if full && in_left {
UpdateChunk::UpdateLeft(chunk)
} else if full && in_right {
UpdateChunk::UpdateRight(chunk)
} else if leaf && (in_left || in_right) {
let iter = chunk.into_iter().filter_map(|(q, d)| f(q, d, None));
let mut overflow_right = Vec::new();
let mut elts = Chunk::empty();
let inner = Arc::get_mut(&mut elts.0).unwrap();
if in_right {
inner.clone_from(self);
for (k, v) in iter {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
overflow_right.push((k, v));
}
}
} else {
for (k, v) in iter {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
overflow_right.push((k, v));
}
}
for (k, v) in self.keys.iter().cloned().zip(self.vals.iter().cloned()) {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
overflow_right.push((k, v));
}
}
}
UpdateChunk::Updated {
elts,
update_left: Vec::new(),
update_right: Vec::new(),
overflow_right,
}
} else {
#[inline(always)]
fn copy_up_to<K, V, const SIZE: usize>(
t: &Chunk<K, V, SIZE>,
elts: &mut ChunkInner<K, V, SIZE>,
overflow_right: &mut Vec<(K, V)>,
m: &mut usize,
i: usize,
) where
K: Ord + Clone,
V: Clone,
{
let n = min(i.saturating_sub(*m), SIZE.saturating_sub(elts.keys.len()));
if n > 0 {
elts.keys.extend(t.keys[*m..*m + n].iter().cloned());
elts.vals.extend(t.vals[*m..*m + n].iter().cloned());
*m += n;
}
if *m < i {
overflow_right.extend(
t.keys.as_slice()[*m..i]
.iter()
.cloned()
.zip(t.vals.as_slice()[*m..i].iter().cloned()),
);
*m = i;
}
}
// invariant: don't cross the streams.
let mut not_done = Vec::new();
let mut update_left = Vec::new();
let mut update_right = Vec::new();
let mut overflow_right = Vec::new();
let mut m = 0;
let mut elts = Chunk::empty();
let inner = Arc::get_mut(&mut elts.0).unwrap();
let mut chunk = chunk.into_iter();
loop {
if m == self.len() && inner.keys.len() == 0 {
not_done.extend(chunk);
break;
}
match chunk.next() {
None => {
copy_up_to(self, inner, &mut overflow_right, &mut m, self.len());
break;
}
Some((q, d)) => {
match self.get(&q) {
Loc::Here(i) => {
copy_up_to(self, inner, &mut overflow_right, &mut m, i);
let r = f(q, d, Some((&self.keys[i], &self.vals[i])));
if let Some((k, v)) = r {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
overflow_right.push((k, v))
}
}
// self[i] was replaced or deleted, skip it
m += 1;
}
Loc::NotPresent(i) => {
copy_up_to(self, inner, &mut overflow_right, &mut m, i);
if let Some((k, v)) = f(q, d, None) {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
overflow_right.push((k, v));
}
}
}
Loc::InLeft => {
if leaf && inner.keys.len() < SIZE {
if let Some((k, v)) = f(q, d, None) {
inner.keys.push(k);
inner.vals.push(v);
}
} else {
update_left.push((q, d))
}
}
Loc::InRight => {
let len = self.len();
copy_up_to(self, inner, &mut overflow_right, &mut m, len);
if leaf && inner.keys.len() < SIZE {
let iter = iter::once((q, d))
.chain(chunk)
.filter_map(|(q, d)| f(q, d, None));
for (k, v) in iter {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
overflow_right.push((k, v));
}
}
} else {
update_right.push((q, d));
update_right.extend(chunk);
}
break;
}
}
}
}
}
if elts.len() > 0 {
assert_eq!(not_done.len(), 0);
UpdateChunk::Updated {
elts,
update_left,
update_right,
overflow_right,
}
} else {
assert_eq!(overflow_right.len(), 0);
UpdateChunk::Removed {
not_done,
update_left,
update_right,
}
}
}
}
pub(crate) fn update<Q, D, F>(
&self,
q: Q,
d: D,
leaf: bool,
f: &mut F,
) -> Update<Q, K, V, D, SIZE>
where
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
match self.get(&q) {
Loc::Here(i) => {
let mut elts = Chunk::empty();
let inner = Arc::get_mut(&mut elts.0).unwrap();
inner.keys.extend(self.keys[0..i].iter().cloned());
inner.vals.extend(self.vals[0..i].iter().cloned());
if let Some((k, v)) = f(q, d, Some((&self.keys[i], &self.vals[i]))) {
inner.keys.push(k);
inner.vals.push(v);
}
if i + 1 < self.len() {
inner
.keys
.extend(self.keys[i + 1..self.len()].iter().cloned());
inner
.vals
.extend(self.vals[i + 1..self.len()].iter().cloned());
}
Update::Updated {
elts,
overflow: None,
previous: Some(self.vals[i].clone()),
}
}
Loc::NotPresent(i) => {
let mut elts = Chunk::empty();
let inner = Arc::get_mut(&mut elts.0).unwrap();
inner.keys.extend(self.keys[0..i].iter().cloned());
inner.vals.extend(self.vals[0..i].iter().cloned());
let overflow = match f(q, d, None) {
None => None,
Some((k, v)) => {
if inner.keys.len() == SIZE {
let (ok, ov) =
(inner.keys.pop().unwrap(), inner.vals.pop().unwrap());
inner.keys.push(k);
inner.vals.push(v);
Some((ok, ov))
} else {
inner.keys.push(k);
inner.vals.push(v);
let mut iter = self.keys[i..self.len()]
.iter()
.cloned()
.zip(self.vals[i..self.len()].iter().cloned());
loop {
match iter.next() {
None => break None,
Some((k, v)) => {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
} else {
break Some((k, v));
}
}
}
}
}
}
};
Update::Updated {
elts,
overflow,
previous: None,
}
}
loc @ Loc::InLeft | loc @ Loc::InRight => {
if !leaf || self.len() == SIZE {
match loc {
Loc::InLeft => Update::UpdateLeft(q, d),
Loc::InRight => Update::UpdateRight(q, d),
Loc::Here(..) | Loc::NotPresent(..) => unreachable!(),
}
} else {
let mut elts = Chunk::empty();
let inner = Arc::get_mut(&mut elts.0).unwrap();
match loc {
Loc::InLeft => {
if let Some((k, v)) = f(q, d, None) {
inner.keys.push(k);
inner.vals.push(v);
}
inner.keys.extend(self.keys[0..self.len()].iter().cloned());
inner.vals.extend(self.vals[0..self.len()].iter().cloned());
}
Loc::InRight => {
inner.keys.extend(self.keys[0..self.len()].iter().cloned());
inner.vals.extend(self.vals[0..self.len()].iter().cloned());
if let Some((k, v)) = f(q, d, None) {
inner.keys.push(k);
inner.vals.push(v);
}
}
_ => unreachable!("bug"),
};
Update::Updated {
elts,
overflow: None,
previous: None,
}
}
}
}
}
pub(crate) fn update_mut<Q, D, F>(
&mut self,
q: Q,
d: D,
leaf: bool,
f: &mut F,
) -> MutUpdate<Q, K, V, D>
where
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
match self.get(&q) {
Loc::Here(i) => match f(q, d, Some((&self.keys[i], &self.vals[i]))) {
Some((k, v)) => {
let inner = self.make_mut();
inner.keys[i] = k;
MutUpdate::Updated {
overflow: None,
previous: Some(mem::replace(&mut inner.vals[i], v)),
}
}
None => {
let inner = self.make_mut();
inner.keys.remove(i);
MutUpdate::Updated {
overflow: None,
previous: Some(inner.vals.remove(i)),
}
}
},
Loc::NotPresent(i) => match f(q, d, None) {
Some((k, v)) => {
let inner = self.make_mut();
let overflow = if inner.keys.len() == SIZE {
let (ok, ov) =
(inner.keys.pop().unwrap(), inner.vals.pop().unwrap());
inner.keys.insert(i, k);
inner.vals.insert(i, v);
Some((ok, ov))
} else {
inner.keys.insert(i, k);
inner.vals.insert(i, v);
None
};
MutUpdate::Updated {
overflow,
previous: None,
}
}
None => MutUpdate::Updated {
overflow: None,
previous: None,
},
},
loc @ Loc::InLeft | loc @ Loc::InRight => {
if !leaf || self.len() == SIZE {
match loc {
Loc::InLeft => MutUpdate::UpdateLeft(q, d),
Loc::InRight => MutUpdate::UpdateRight(q, d),
Loc::Here(..) | Loc::NotPresent(..) => unreachable!(),
}
} else {
let inner = self.make_mut();
match loc {
Loc::InLeft => {
if let Some((k, v)) = f(q, d, None) {
inner.keys.insert(0, k);
inner.vals.insert(0, v);
}
}
Loc::InRight => {
if let Some((k, v)) = f(q, d, None) {
inner.keys.push(k);
inner.vals.push(v);
}
}
_ => unreachable!("bug"),
};
MutUpdate::Updated {
overflow: None,
previous: None,
}
}
}
}
}
pub(crate) fn intersect<F>(
c0: &Chunk<K, V, SIZE>,
c1: &Chunk<K, V, SIZE>,
r: &mut Vec<(K, V)>,
f: &mut F,
) where
F: FnMut(&K, &V, &V) -> Option<V>,
{
if c0.len() > 0 && c1.len() > 0 {
let (c0, c1) = if c0.len() < c1.len() {
(c0, c1)
} else {
(c1, c0)
};
r.extend(c0.keys.iter().enumerate().filter_map(|(i, k)| {
match c1.keys.binary_search(&k) {
Err(_) => None,
Ok(j) => f(k, &c0.vals[i], &c1.vals[j]).map(|v| (k.clone(), v)),
}
}))
}
}
pub(crate) fn remove_elt_at(&self, i: usize) -> Self {
let mut elts = Chunk::empty();
let t = Arc::get_mut(&mut elts.0).unwrap();
if i >= self.keys.len() {
panic!("remove_elt_at: out of bounds")
} else if self.len() == 1 {
elts
} else if i == 0 {
t.keys.extend(self.keys[1..self.len()].iter().cloned());
t.vals.extend(self.vals[1..self.len()].iter().cloned());
elts
} else if i == self.keys.len() - 1 {
t.keys.extend(self.keys[0..self.len() - 1].iter().cloned());
t.vals.extend(self.vals[0..self.len() - 1].iter().cloned());
elts
} else {
t.keys.extend(self.keys[0..i].iter().cloned());
t.keys.extend(self.keys[i + 1..self.len()].iter().cloned());
t.vals.extend(self.vals[0..i].iter().cloned());
t.vals.extend(self.vals[i + 1..self.len()].iter().cloned());
elts
}
}
pub(crate) fn remove_elt_at_mut(&mut self, i: usize) -> (K, V) {
if i >= self.len() {
panic!("remove_elt_at_mut: out of bounds")
} else {
let inner = self.make_mut();
let k = inner.keys.remove(i);
let v = inner.vals.remove(i);
(k, v)
}
}
pub(crate) fn append<I: IntoIterator<Item = (K, V)>>(&self, other: I) -> Self {
let mut elts = self.clone();
let inner = elts.make_mut();
for (k, v) in other {
if inner.keys.len() < SIZE {
inner.keys.push(k);
inner.vals.push(v);
}
}
elts
}
pub(crate) fn min_max_key(&self) -> Option<(K, K)> {
if self.len() == 0 {
None
} else {
Some((self.keys[0].clone(), self.keys[self.len() - 1].clone()))
}
}
pub(crate) fn min_elt(&self) -> Option<(&K, &V)> {
if self.len() == 0 {
None
} else {
Some((&self.keys[0], &self.vals[0]))
}
}
pub(crate) fn max_elt(&self) -> Option<(&K, &V)> {
if self.len() == 0 {
None
} else {
let last = self.len() - 1;
Some((&self.keys[last], &self.vals[last]))
}
}
pub(crate) fn len(&self) -> usize {
self.keys.len()
}
pub(crate) fn key(&self, i: usize) -> &K {
&self.keys[i]
}
pub(crate) fn val(&self, i: usize) -> &V {
&self.vals[i]
}
pub(crate) fn val_mut(&mut self, i: usize) -> &mut V {
&mut self.make_mut().vals[i]
}
pub(crate) fn kv(&self, i: usize) -> (&K, &V) {
(&self.keys[i], &self.vals[i])
}
pub(crate) fn to_vec(&self) -> Vec<(K, V)> {
self.into_iter()
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
}
}
impl<K: Ord + Clone, V: Clone, const SIZE: usize> IntoIterator for Chunk<K, V, SIZE> {
type Item = (K, V);
type IntoIter = iter::Zip<arrayvec::IntoIter<K, SIZE>, arrayvec::IntoIter<V, SIZE>>;
fn into_iter(mut self) -> Self::IntoIter {
let inner = mem::replace(
self.make_mut(),
ChunkInner {
keys: ArrayVec::new(),
vals: ArrayVec::new(),
},
);
inner.keys.into_iter().zip(inner.vals.into_iter())
}
}
impl<'a, K: Ord + Clone, V: Clone, const SIZE: usize> IntoIterator
for &'a Chunk<K, V, SIZE>
{
type Item = (&'a K, &'a V);
type IntoIter = iter::Zip<slice::Iter<'a, K>, slice::Iter<'a, V>>;
fn into_iter(self) -> Self::IntoIter {
(&self.keys).into_iter().zip(&self.vals)
}
}
impl<'a, K: Ord + Clone, V: Clone, const SIZE: usize> IntoIterator
for &'a mut Chunk<K, V, SIZE>
{
type Item = (&'a K, &'a mut V);
type IntoIter = iter::Zip<slice::Iter<'a, K>, slice::IterMut<'a, V>>;
fn into_iter(self) -> Self::IntoIter {
let inner = self.make_mut();
(&inner.keys).into_iter().zip(&mut inner.vals)
}
}

13
vendor/immutable-chunkmap/src/lib.rs vendored Normal file

@@ -0,0 +1,13 @@
//! Immutable maps and sets. See map and set modules for details.
#![cfg_attr(not(any(feature = "rayon", feature = "pool")), no_std)]
extern crate alloc;
pub(crate) mod avl;
pub(crate) mod chunk;
pub mod map;
pub mod set;
#[cfg(test)]
mod tests;

778
vendor/immutable-chunkmap/src/map.rs vendored Normal file

@@ -0,0 +1,778 @@
use crate::avl::{Iter, IterMut, Tree, WeakTree};
pub use crate::chunk::DEFAULT_SIZE;
use core::{
borrow::Borrow,
cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd},
default::Default,
fmt::{self, Debug, Formatter},
hash::{Hash, Hasher},
iter::FromIterator,
ops::{Index, IndexMut, RangeBounds, RangeFull},
};
#[cfg(feature = "serde")]
use serde::{
de::{MapAccess, Visitor},
ser::SerializeMap,
Deserialize, Deserializer, Serialize, Serializer,
};
#[cfg(feature = "serde")]
use core::marker::PhantomData;
#[cfg(feature = "rayon")]
use rayon::{
iter::{FromParallelIterator, IntoParallelIterator},
prelude::*,
};
/// This Map uses a similar strategy to BTreeMap to ensure cache
/// efficient performance on modern hardware while still providing
/// log(N) get, insert, and remove operations.
///
/// For good performance, it is very important to understand
/// that clone is a fundamental operation; it needs to be fast
/// for your key and data types, because it's going to be
/// called a lot whenever you change the map.
///
/// # Why
///
/// 1. Multiple threads can read this structure even while one thread
/// is updating it. Using a library like arc_swap you can avoid ever
/// blocking readers, as sketched below.
///
/// 2. Snapshotting this structure is free.
///
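/// For example, a sketch using the arc_swap crate (not a dependency of
/// this crate, shown only to illustrate the pattern):
/// ```ignore
/// use arc_swap::ArcSwap;
/// use self::immutable_chunkmap::map::MapM;
///
/// let shared = ArcSwap::from_pointee(MapM::<u64, u64>::new());
/// // a writer publishes an updated snapshot; readers are never blocked
/// shared.rcu(|m| m.insert(1, 1).0);
/// // a reader takes a lock free snapshot of the current map
/// assert_eq!(shared.load().get(&1), Some(&1));
/// ```
///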
/// # Examples
/// ```
/// # extern crate alloc;
/// use alloc::string::String;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m =
/// MapM::new()
/// .insert(String::from("1"), 1).0
/// .insert(String::from("2"), 2).0
/// .insert(String::from("3"), 3).0;
///
/// assert_eq!(m.get("1"), Option::Some(&1));
/// assert_eq!(m.get("2"), Option::Some(&2));
/// assert_eq!(m.get("3"), Option::Some(&3));
/// assert_eq!(m.get("4"), Option::None);
///
/// for (k, v) in &m {
/// println!("key {}, val: {}", k, v)
/// }
/// ```
#[derive(Clone)]
pub struct Map<K: Ord + Clone, V: Clone, const SIZE: usize>(Tree<K, V, SIZE>);
/// Map using a smaller chunk size, faster to update, slower to search
pub type MapS<K, V> = Map<K, V, { DEFAULT_SIZE / 2 }>;
/// Map using the default chunk size, a good balance of update and search
pub type MapM<K, V> = Map<K, V, DEFAULT_SIZE>;
/// Map using a larger chunk size, faster to search, slower to update
pub type MapL<K, V> = Map<K, V, { DEFAULT_SIZE * 2 }>;
/// A weak reference to a map.
#[derive(Clone)]
pub struct WeakMapRef<K: Ord + Clone, V: Clone, const SIZE: usize>(WeakTree<K, V, SIZE>);
pub type WeakMapRefS<K, V> = WeakMapRef<K, V, { DEFAULT_SIZE / 2 }>;
pub type WeakMapRefM<K, V> = WeakMapRef<K, V, DEFAULT_SIZE>;
pub type WeakMapRefL<K, V> = WeakMapRef<K, V, { DEFAULT_SIZE * 2 }>;
impl<K, V, const SIZE: usize> WeakMapRef<K, V, SIZE>
where
K: Ord + Clone,
V: Clone,
{
pub fn upgrade(&self) -> Option<Map<K, V, SIZE>> {
self.0.upgrade().map(Map)
}
}
impl<K, V, const SIZE: usize> Hash for Map<K, V, SIZE>
where
K: Hash + Ord + Clone,
V: Hash + Clone,
{
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state)
}
}
impl<K, V, const SIZE: usize> Default for Map<K, V, SIZE>
where
K: Ord + Clone,
V: Clone,
{
fn default() -> Map<K, V, SIZE> {
Map::new()
}
}
impl<K, V, const SIZE: usize> PartialEq for Map<K, V, SIZE>
where
K: PartialEq + Ord + Clone,
V: PartialEq + Clone,
{
fn eq(&self, other: &Map<K, V, SIZE>) -> bool {
self.0 == other.0
}
}
impl<K, V, const SIZE: usize> Eq for Map<K, V, SIZE>
where
K: Eq + Ord + Clone,
V: Eq + Clone,
{
}
impl<K, V, const SIZE: usize> PartialOrd for Map<K, V, SIZE>
where
K: Ord + Clone,
V: PartialOrd + Clone,
{
fn partial_cmp(&self, other: &Map<K, V, SIZE>) -> Option<Ordering> {
self.0.partial_cmp(&other.0)
}
}
impl<K, V, const SIZE: usize> Ord for Map<K, V, SIZE>
where
K: Ord + Clone,
V: Ord + Clone,
{
fn cmp(&self, other: &Map<K, V, SIZE>) -> Ordering {
self.0.cmp(&other.0)
}
}
impl<K, V, const SIZE: usize> Debug for Map<K, V, SIZE>
where
K: Debug + Ord + Clone,
V: Debug + Clone,
{
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl<'a, Q, K, V, const SIZE: usize> Index<&'a Q> for Map<K, V, SIZE>
where
Q: Ord,
K: Ord + Clone + Borrow<Q>,
V: Clone,
{
type Output = V;
fn index(&self, k: &Q) -> &V {
self.get(k).expect("element not found for key")
}
}
impl<'a, Q, K, V, const SIZE: usize> IndexMut<&'a Q> for Map<K, V, SIZE>
where
Q: Ord,
K: Ord + Clone + Borrow<Q>,
V: Clone,
{
fn index_mut(&mut self, k: &'a Q) -> &mut Self::Output {
self.get_mut_cow(k).expect("element not found for key")
}
}
impl<K, V, const SIZE: usize> FromIterator<(K, V)> for Map<K, V, SIZE>
where
K: Ord + Clone,
V: Clone,
{
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
Map::new().insert_many(iter)
}
}
impl<'a, K, V, const SIZE: usize> IntoIterator for &'a Map<K, V, SIZE>
where
K: 'a + Ord + Clone,
V: 'a + Clone,
{
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, RangeFull, K, K, V, SIZE>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
#[cfg(feature = "serde")]
impl<K, V, const SIZE: usize> Serialize for Map<K, V, SIZE>
where
K: Serialize + Clone + Ord,
V: Serialize + Clone,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(self.len()))?;
for (k, v) in self {
map.serialize_entry(k, v)?
}
map.end()
}
}
#[cfg(feature = "serde")]
struct MapVisitor<K: Clone + Ord, V: Clone, const SIZE: usize> {
marker: PhantomData<fn() -> Map<K, V, SIZE>>,
}
#[cfg(feature = "serde")]
impl<'a, K, V, const SIZE: usize> Visitor<'a> for MapVisitor<K, V, SIZE>
where
K: Deserialize<'a> + Clone + Ord,
V: Deserialize<'a> + Clone,
{
type Value = Map<K, V, SIZE>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("expected an immutable_chunkmap::Map")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'a>,
{
let mut t = Map::<K, V, SIZE>::new();
while let Some((k, v)) = map.next_entry()? {
t.insert_cow(k, v);
}
Ok(t)
}
}
#[cfg(feature = "serde")]
impl<'a, K, V, const SIZE: usize> Deserialize<'a> for Map<K, V, SIZE>
where
K: Deserialize<'a> + Clone + Ord,
V: Deserialize<'a> + Clone,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'a>,
{
deserializer.deserialize_map(MapVisitor {
marker: PhantomData,
})
}
}
#[cfg(feature = "rayon")]
impl<'a, K, V, const SIZE: usize> IntoParallelIterator for &'a Map<K, V, SIZE>
where
K: 'a + Ord + Clone + Send + Sync,
V: Clone + Send + Sync,
{
type Item = (&'a K, &'a V);
type Iter = rayon::vec::IntoIter<(&'a K, &'a V)>;
fn into_par_iter(self) -> Self::Iter {
self.into_iter().collect::<Vec<_>>().into_par_iter()
}
}
#[cfg(feature = "rayon")]
impl<K, V, const SIZE: usize> FromParallelIterator<(K, V)> for Map<K, V, SIZE>
where
K: Ord + Clone + Send + Sync,
V: Clone + Send + Sync,
{
fn from_par_iter<I>(i: I) -> Self
where
I: IntoParallelIterator<Item = (K, V)>,
{
i.into_par_iter()
.fold_with(Map::new(), |mut m, (k, v)| {
m.insert_cow(k, v);
m
})
.reduce_with(|m0, m1| m0.union(&m1, |_k, _v0, v1| Some(v1.clone())))
.unwrap_or_else(Map::new)
}
}
impl<K, V, const SIZE: usize> Map<K, V, SIZE>
where
K: Ord + Clone,
V: Clone,
{
/// Create a new empty map
pub fn new() -> Self {
Map(Tree::new())
}
/// Create a weak reference to this map
pub fn downgrade(&self) -> WeakMapRef<K, V, SIZE> {
WeakMapRef(self.0.downgrade())
}
/// Return the number of strong references to this map (see Arc)
pub fn strong_count(&self) -> usize {
self.0.strong_count()
}
/// Return the number of weak references to this map (see Arc)
pub fn weak_count(&self) -> usize {
self.0.weak_count()
}
/// This will insert many elements at once, and is
/// potentially a lot faster than inserting one by one,
/// especially if the data is sorted. It is just a wrapper
/// around the more general update_many method.
///
/// # Examples
///```
/// use self::immutable_chunkmap::map::MapM;
///
/// let mut v = vec![(1, 3), (10, 1), (-12, 2), (44, 0), (50, -1)];
/// v.sort_unstable_by_key(|&(k, _)| k);
///
/// let m = MapM::new().insert_many(v.iter().map(|(k, v)| (*k, *v)));
///
/// for (k, v) in &v {
/// assert_eq!(m.get(k), Option::Some(v))
/// }
/// ```
pub fn insert_many<E: IntoIterator<Item = (K, V)>>(&self, elts: E) -> Self {
Map(self.0.insert_many(elts))
}
/// This will remove many elements at once, and is potentially a
/// lot faster than removing elements one by one, especially if
/// the data is sorted. It is just a wrapper around the more
/// general update_many method.
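///
/// # Examples
/// A minimal sketch with integer keys:
/// ```
/// use self::immutable_chunkmap::map::MapM;
///
/// let m = MapM::new().insert_many((0..5).map(|k| (k, k)));
/// let m = m.remove_many(0..3);
/// assert_eq!(m.len(), 2);
/// assert_eq!(m.get(&0), None);
/// assert_eq!(m.get(&3), Some(&3));
/// ```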
pub fn remove_many<Q, E>(&self, elts: E) -> Self
where
E: IntoIterator<Item = Q>,
Q: Ord,
K: Borrow<Q>,
{
self.update_many(elts.into_iter().map(|q| (q, ())), |_, _, _| None)
}
/// This method updates multiple bindings in one call. Given an
/// iterator over an arbitrary type (Q, D), where Q is any borrowed
/// form of K, and an update function that takes Q, D, and the current
/// binding in the map (if any) and produces the new binding (if any),
/// this method produces a new map with the bindings of many elements
/// updated at once. It skips intermediate node allocations where
/// possible. If the data in elts is sorted, it can skip many more
/// intermediate allocations and produce a speedup of about 10x
/// compared to inserting/updating one by one. In any case it should
/// always be faster than inserting elements one by one, even with
/// random unsorted keys.
///
/// # Examples
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m = MapM::from_iter((0..4).map(|k| (k, k)));
/// let m = m.update_many(
/// (0..4).map(|x| (x, ())),
/// |k, (), cur| cur.map(|(_, c)| (k, c + 1))
/// );
/// assert_eq!(
/// m.into_iter().map(|(k, v)| (*k, *v)).collect::<Vec<_>>(),
/// vec![(0, 1), (1, 2), (2, 3), (3, 4)]
/// );
/// ```
pub fn update_many<Q, D, E, F>(&self, elts: E, mut f: F) -> Self
where
E: IntoIterator<Item = (Q, D)>,
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
Map(self.0.update_many(elts, &mut f))
}
/// Return a new map with (k, v) inserted into it. If k
/// already exists in the old map, the old binding will be
/// returned, and the new map will contain the new
/// binding. This method is just a wrapper around
/// update.
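///
/// # Examples
/// A minimal sketch:
/// ```
/// use self::immutable_chunkmap::map::MapM;
///
/// let (m, prev) = MapM::new().insert(1, "a");
/// assert_eq!(prev, None);
/// let (m, prev) = m.insert(1, "b");
/// assert_eq!(prev, Some("a"));
/// assert_eq!(m.get(&1), Some(&"b"));
/// ```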
pub fn insert(&self, k: K, v: V) -> (Self, Option<V>) {
let (root, prev) = self.0.insert(k, v);
(Map(root), prev)
}
/// Insert in place, using copy on write semantics if self is not a
/// unique reference to the map. See `update_cow`.
pub fn insert_cow(&mut self, k: K, v: V) -> Option<V> {
self.0.insert_cow(k, v)
}
/// Return a new map with the binding for q, which can be any
/// borrowed form of K, updated to the result of f. If f returns
/// None, the binding will be removed from the new map, otherwise
/// it will be inserted. This function is more efficient than
/// calling `get` and then `insert`, since it makes only one tree
/// traversal instead of two. This method runs in log(N) time and
/// space where N is the size of the map.
///
/// # Examples
/// ```
/// use self::immutable_chunkmap::map::MapM;
///
/// let (m, _) = MapM::new().update(0, 0, |k, d, _| Some((k, d)));
/// let (m, _) = m.update(1, 1, |k, d, _| Some((k, d)));
/// let (m, _) = m.update(2, 2, |k, d, _| Some((k, d)));
/// assert_eq!(m.get(&0), Some(&0));
/// assert_eq!(m.get(&1), Some(&1));
/// assert_eq!(m.get(&2), Some(&2));
///
/// let (m, _) = m.update(0, (), |k, (), v| v.map(move |(_, v)| (k, v + 1)));
/// assert_eq!(m.get(&0), Some(&1));
/// assert_eq!(m.get(&1), Some(&1));
/// assert_eq!(m.get(&2), Some(&2));
///
/// let (m, _) = m.update(1, (), |_, (), _| None);
/// assert_eq!(m.get(&0), Some(&1));
/// assert_eq!(m.get(&1), None);
/// assert_eq!(m.get(&2), Some(&2));
/// ```
pub fn update<Q, D, F>(&self, q: Q, d: D, mut f: F) -> (Self, Option<V>)
where
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
let (root, prev) = self.0.update(q, d, &mut f);
(Map(root), prev)
}
/// Perform a copy on write update to the map. In the case that
/// self is a unique reference to the map, then the update will be
/// performed completely in place. self will be mutated, and no
/// previous version will be available. In the case that self has
/// a clone, or clones, then only the parts of the map that need
/// to be mutated will be copied before the update is
/// performed. self will reference the mutated copy, and previous
/// versions of the map will not be modified. self will still
/// share all the parts of the map that did not need to be mutated
/// with any pre-existing clones.
///
/// COW semantics are a flexible middle way between full
/// persistence and full mutability. Needless to say, in the case
/// where you have a unique reference to the map, using update_cow
/// is a lot faster than using update, and a lot more flexible
/// than update_many.
///
/// Other than copy on write, the semantics of this method are
/// identical to those of update.
///
/// # Examples
/// ```
/// use self::immutable_chunkmap::map::MapM;
///
/// let mut m = MapM::new().update(0, 0, |k, d, _| Some((k, d))).0;
/// let orig = m.clone();
/// m.update_cow(1, 1, |k, d, _| Some((k, d))); // copies the original chunk
/// m.update_cow(2, 2, |k, d, _| Some((k, d))); // doesn't copy anything
/// assert_eq!(m.len(), 3);
/// assert_eq!(orig.len(), 1);
/// assert_eq!(m.get(&0), Some(&0));
/// assert_eq!(m.get(&1), Some(&1));
/// assert_eq!(m.get(&2), Some(&2));
/// assert_eq!(orig.get(&0), Some(&0));
/// assert_eq!(orig.get(&1), None);
/// assert_eq!(orig.get(&2), None);
///```
pub fn update_cow<Q, D, F>(&mut self, q: Q, d: D, mut f: F) -> Option<V>
where
Q: Ord,
K: Borrow<Q>,
F: FnMut(Q, D, Option<(&K, &V)>) -> Option<(K, V)>,
{
self.0.update_cow(q, d, &mut f)
}
/// Merge two maps together. Bindings that exist in both maps will
/// be passed to f, which may elect to remove the binding by
/// returning None. This function runs in O(log(n) + m) time and
/// space, where n is the size of the largest map, and m is the
/// number of intersecting chunks. It will never be slower than
/// calling update_many on the first map with an iterator over the
/// second, and will be significantly faster if the intersection
/// is minimal or empty.
///
/// # Examples
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m0 = MapM::from_iter((0..10).map(|k| (k, 1)));
/// let m1 = MapM::from_iter((10..20).map(|k| (k, 1)));
/// let m2 = m0.union(&m1, |_k, _v0, _v1| panic!("no intersection expected"));
///
/// for i in 0..20 {
/// assert!(m2.get(&i).is_some())
/// }
///
/// let m3 = MapM::from_iter((5..9).map(|k| (k, 1)));
/// let m4 = m3.union(&m2, |_k, v0, v1| Some(v0 + v1));
///
/// for i in 0..20 {
/// assert!(
/// *m4.get(&i).unwrap() ==
/// *m3.get(&i).unwrap_or(&0) + *m2.get(&i).unwrap_or(&0)
/// )
/// }
/// ```
pub fn union<F>(&self, other: &Map<K, V, SIZE>, mut f: F) -> Self
where
F: FnMut(&K, &V, &V) -> Option<V>,
{
Map(Tree::union(&self.0, &other.0, &mut f))
}
/// Produce a map containing the intersection (by key) of two maps,
/// with the values mapped through f. The function f runs on each
/// intersecting element, and may omit an element from the
/// intersection by returning None, or change its value in any way
/// it likes. Runs in O(log(N) + M) time and space, where N is
/// the size of the smallest map, and M is the number of
/// intersecting chunks.
///
/// # Examples
///```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m0 = MapM::from_iter((0..100000).map(|k| (k, 1)));
/// let m1 = MapM::from_iter((50..30000).map(|k| (k, 1)));
/// let m2 = m0.intersect(&m1, |_k, v0, v1| Some(v0 + v1));
///
/// for i in 0..100000 {
/// if i >= 30000 || i < 50 {
/// assert!(m2.get(&i).is_none());
/// } else {
/// assert!(*m2.get(&i).unwrap() == 2);
/// }
/// }
/// ```
pub fn intersect<F>(&self, other: &Map<K, V, SIZE>, mut f: F) -> Self
where
F: FnMut(&K, &V, &V) -> Option<V>,
{
Map(Tree::intersect(&self.0, &other.0, &mut f))
}
/// Produce a map containing the second map subtracted from the
/// first. The function f is called for each intersecting element
/// and ultimately decides whether it appears in the result; for
/// example, to compute a classical set difference the function
/// should always return None.
///
/// # Examples
///```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m0 = MapM::from_iter((0..10000).map(|k| (k, 1)));
/// let m1 = MapM::from_iter((50..3000).map(|k| (k, 1)));
/// let m2 = m0.diff(&m1, |_k, _v0, _v1| None);
///
/// m2.invariant();
/// dbg!(m2.len());
/// assert!(m2.len() == 10000 - 2950);
/// for i in 0..10000 {
/// if i >= 3000 || i < 50 {
/// assert!(*m2.get(&i).unwrap() == 1);
/// } else {
/// assert!(m2.get(&i).is_none());
/// }
/// }
/// ```
pub fn diff<F>(&self, other: &Map<K, V, SIZE>, mut f: F) -> Self
where
F: FnMut(&K, &V, &V) -> Option<V>,
K: Debug,
V: Debug,
{
Map(Tree::diff(&self.0, &other.0, &mut f))
}
/// lookup the mapping for k. If it doesn't exist return
/// None. Runs in log(N) time and constant space, where N
/// is the size of the map.
pub fn get<'a, Q: ?Sized + Ord>(&'a self, k: &Q) -> Option<&'a V>
where
K: Borrow<Q>,
{
self.0.get(k)
}
/// lookup the mapping for k and return the key. If it doesn't
/// exist return None. Runs in log(N) time and constant space,
/// where N is the size of the map.
pub fn get_key<'a, Q: ?Sized + Ord>(&'a self, k: &Q) -> Option<&'a K>
where
K: Borrow<Q>,
{
self.0.get_key(k)
}
/// lookup the mapping for k and return both the key and the
/// value. If it doesn't exist return None. Runs in log(N) time
/// and constant space, where N is the size of the map.
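///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m = MapM::from_iter((0..3).map(|k| (k, k * 10)));
/// assert_eq!(m.get_full(&1), Some((&1, &10)));
/// assert_eq!(m.get_full(&5), None);
/// ```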
pub fn get_full<'a, Q: ?Sized + Ord>(&'a self, k: &Q) -> Option<(&'a K, &'a V)>
where
K: Borrow<Q>,
{
self.0.get_full(k)
}
/// Get a mutable reference to the value mapped to `k` using copy on write semantics.
/// This works like `Arc::make_mut`; it will only clone the parts of the tree that
/// - are required to reach `k`, and
/// - have a strong count > 1
///
/// This operation is also triggered by mutable indexing on the map, e.g. `&mut m[k]`
/// calls `get_mut_cow` on `m`.
///
/// # Example
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM as Map;
///
/// let mut m = Map::from_iter((0..100).map(|k| (k, Map::from_iter((0..100).map(|k| (k, 1))))));
/// let orig = m.clone();
///
/// if let Some(inner) = m.get_mut_cow(&0) {
/// if let Some(v) = inner.get_mut_cow(&0) {
/// *v += 1
/// }
/// }
///
/// assert_eq!(m.get(&0).and_then(|m| m.get(&0)), Some(&2));
/// assert_eq!(orig.get(&0).and_then(|m| m.get(&0)), Some(&1));
/// ```
pub fn get_mut_cow<'a, Q: ?Sized + Ord>(&'a mut self, k: &Q) -> Option<&'a mut V>
where
K: Borrow<Q>,
{
self.0.get_mut_cow(k)
}
/// Same as `get_mut_cow`, except that if `k` is not in the map a
/// binding will first be inserted by calling `f`.
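///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use self::immutable_chunkmap::map::MapM;
///
/// let mut m: MapM<i32, i32> = MapM::new();
///
/// // 0 is missing, so it is inserted with the value returned by the closure
/// *m.get_or_insert_cow(0, || 1) += 1;
/// // 0 is now present, so the closure is not called again
/// *m.get_or_insert_cow(0, || 100) += 1;
///
/// assert_eq!(m.get(&0), Some(&3));
/// ```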
pub fn get_or_insert_cow<'a, F>(&'a mut self, k: K, f: F) -> &'a mut V
where
F: FnOnce() -> V,
{
self.0.get_or_insert_cow(k, f)
}
/// return a new map with the mapping under k removed. If
/// the binding existed in the old map return it. Runs in
/// log(N) time and log(N) space, where N is the size of
/// the map.
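///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m = MapM::from_iter((0..3).map(|k| (k, k)));
/// let (m2, prev) = m.remove(&1);
///
/// assert_eq!(prev, Some(1));
/// assert_eq!(m2.get(&1), None);
/// // the original map is unchanged
/// assert_eq!(m.get(&1), Some(&1));
/// ```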
pub fn remove<Q: Sized + Ord>(&self, k: &Q) -> (Self, Option<V>)
where
K: Borrow<Q>,
{
let (t, prev) = self.0.remove(k);
(Map(t), prev)
}
/// remove in place. If self is not a unique reference to the map,
/// only the parts that must change are copied first (copy on
/// write). see `update_cow`.
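///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let mut m = MapM::from_iter((0..3).map(|k| (k, k)));
/// let orig = m.clone();
///
/// assert_eq!(m.remove_cow(&1), Some(1));
/// assert_eq!(m.get(&1), None);
/// // pre-existing clones are not modified
/// assert_eq!(orig.get(&1), Some(&1));
/// ```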
pub fn remove_cow<Q: Sized + Ord>(&mut self, k: &Q) -> Option<V>
where
K: Borrow<Q>,
{
self.0.remove_cow(k)
}
/// get the number of elements in the map in O(1) time and space
pub fn len(&self) -> usize {
self.0.len()
}
/// return an iterator over the subset of elements in the
/// map that are within the specified range.
///
/// The returned iterator runs in O(log(N) + M) time, and
/// constant space. N is the number of elements in the
/// tree, and M is the number of elements you examine.
///
/// if lbound >= ubound the returned iterator will be empty
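///
/// # Examples
/// A minimal sketch of the expected behavior, assuming entries are
/// yielded in ascending key order as with the other iterators in
/// this crate:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::map::MapM;
///
/// let m = MapM::from_iter((0..10).map(|k| (k, k)));
/// let keys: Vec<i32> = m.range(3..6).map(|(k, _)| *k).collect();
/// assert_eq!(keys, vec![3, 4, 5]);
/// ```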
pub fn range<'a, Q, R>(&'a self, r: R) -> Iter<'a, R, Q, K, V, SIZE>
where
Q: Ord + ?Sized + 'a,
K: Borrow<Q>,
R: RangeBounds<Q> + 'a,
{
self.0.range(r)
}
/// return a mutable iterator over the subset of elements in the
/// map that are within the specified range. The iterator will
/// copy on write the part of the tree that it visits,
/// specifically it will be as if you ran get_mut_cow on every
/// element you visit.
///
/// The returned iterator runs in O(log(N) + M) time, and
/// constant space. N is the number of elements in the
/// tree, and M is the number of elements you examine.
///
/// if lbound >= ubound the returned iterator will be empty
pub fn range_mut_cow<'a, Q, R>(&'a mut self, r: R) -> IterMut<'a, R, Q, K, V, SIZE>
where
Q: Ord + ?Sized + 'a,
K: Borrow<Q>,
R: RangeBounds<Q> + 'a,
{
self.0.range_mut_cow(r)
}
/// return a mutable iterator over the entire map. The iterator
/// will copy on write every element in the tree, specifically it
/// will be as if you ran get_mut_cow on every element.
///
/// The returned iterator runs in O(log(N) + M) time, and
/// constant space. N is the number of elements in the
/// tree, and M is the number of elements you examine.
pub fn iter_mut_cow<'a>(&'a mut self) -> IterMut<'a, RangeFull, K, K, V, SIZE> {
self.0.iter_mut_cow()
}
}
impl<K, V, const SIZE: usize> Map<K, V, SIZE>
where
K: Ord + Clone,
V: Clone + Default,
{
/// Same as `get_mut_cow` except if the value isn't in the map it will
/// be added by calling `V::default`
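///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use self::immutable_chunkmap::map::MapM;
///
/// let mut m: MapM<i32, usize> = MapM::new();
/// // the first call inserts usize::default() (0) before returning the reference
/// *m.get_or_default_cow(42) += 1;
/// *m.get_or_default_cow(42) += 1;
///
/// assert_eq!(m.get(&42), Some(&2));
/// ```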
pub fn get_or_default_cow<'a>(&'a mut self, k: K) -> &'a mut V {
self.get_or_insert_cow(k, V::default)
}
}
impl<K, V, const SIZE: usize> Map<K, V, SIZE>
where
K: Ord + Clone + Debug,
V: Clone + Debug,
{
#[allow(dead_code)]
pub fn invariant(&self) -> () {
self.0.invariant()
}
}

528
vendor/immutable-chunkmap/src/set.rs vendored Normal file
View File

@@ -0,0 +1,528 @@
use crate::avl::{Iter, Tree, WeakTree};
pub use crate::chunk::DEFAULT_SIZE;
use core::{
borrow::Borrow,
cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd},
default::Default,
fmt::{self, Debug, Formatter},
hash::{Hash, Hasher},
iter::FromIterator,
ops::{RangeBounds, RangeFull},
};
#[cfg(feature = "serde")]
use serde::{
de::{SeqAccess, Visitor},
ser::SerializeSeq,
Deserialize, Deserializer, Serialize, Serializer,
};
#[cfg(feature = "serde")]
use core::marker::PhantomData;
#[cfg(feature = "rayon")]
use rayon::{
iter::{FromParallelIterator, IntoParallelIterator},
prelude::*,
};
/// This set uses a similar strategy to BTreeSet to ensure cache
/// efficient performance on modern hardware while still providing
/// log(N) get, insert, and remove operations.
/// # Examples
/// ```
/// # extern crate alloc;
/// use alloc::string::String;
/// use self::immutable_chunkmap::set::SetM;
///
/// let m =
/// SetM::new()
/// .insert(String::from("1")).0
/// .insert(String::from("2")).0
/// .insert(String::from("3")).0;
///
/// assert_eq!(m.contains("1"), true);
/// assert_eq!(m.contains("2"), true);
/// assert_eq!(m.contains("3"), true);
/// assert_eq!(m.contains("4"), false);
///
/// for k in &m { println!("{}", k) }
/// ```
#[derive(Clone)]
pub struct Set<K: Ord + Clone, const SIZE: usize>(Tree<K, (), SIZE>);
/// set with a smaller chunk size, faster to update, slower to search
pub type SetS<K> = Set<K, { DEFAULT_SIZE / 2 }>;
/// set with the default chunk size, a good balance of search and update performance
pub type SetM<K> = Set<K, DEFAULT_SIZE>;
/// set with a larger chunk size, faster to search, slower to update
pub type SetL<K> = Set<K, { DEFAULT_SIZE * 2 }>;
#[derive(Clone)]
pub struct WeakSetRef<K: Ord + Clone, const SIZE: usize>(WeakTree<K, (), SIZE>);
pub type WeakSetRefS<K> = WeakSetRef<K, 32>;
pub type WeakSetRefM<K> = WeakSetRef<K, 128>;
pub type WeakSetRefL<K> = WeakSetRef<K, 512>;
impl<K, const SIZE: usize> WeakSetRef<K, SIZE>
where
K: Ord + Clone,
{
pub fn upgrade(&self) -> Option<Set<K, SIZE>> {
self.0.upgrade().map(Set)
}
}
impl<K, const SIZE: usize> Hash for Set<K, SIZE>
where
K: Hash + Ord + Clone,
{
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state)
}
}
impl<K, const SIZE: usize> Default for Set<K, SIZE>
where
K: Ord + Clone,
{
fn default() -> Set<K, SIZE> {
Set::new()
}
}
impl<K, const SIZE: usize> PartialEq for Set<K, SIZE>
where
K: Ord + Clone,
{
fn eq(&self, other: &Set<K, SIZE>) -> bool {
self.0 == other.0
}
}
impl<K, const SIZE: usize> Eq for Set<K, SIZE> where K: Eq + Ord + Clone {}
impl<K, const SIZE: usize> PartialOrd for Set<K, SIZE>
where
K: Ord + Clone,
{
fn partial_cmp(&self, other: &Set<K, SIZE>) -> Option<Ordering> {
self.0.partial_cmp(&other.0)
}
}
impl<K, const SIZE: usize> Ord for Set<K, SIZE>
where
K: Ord + Clone,
{
fn cmp(&self, other: &Set<K, SIZE>) -> Ordering {
self.0.cmp(&other.0)
}
}
impl<K, const SIZE: usize> Debug for Set<K, SIZE>
where
K: Debug + Ord + Clone,
{
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_set().entries(self.into_iter()).finish()
}
}
impl<K, const SIZE: usize> FromIterator<K> for Set<K, SIZE>
where
K: Ord + Clone,
{
fn from_iter<T: IntoIterator<Item = K>>(iter: T) -> Self {
Set::new().insert_many(iter)
}
}
pub struct SetIter<
'a,
R: RangeBounds<Q> + 'a,
Q: Ord + ?Sized,
K: 'a + Clone + Ord + Borrow<Q>,
const SIZE: usize,
>(Iter<'a, R, Q, K, (), SIZE>);
impl<'a, R, Q, K, const SIZE: usize> Iterator for SetIter<'a, R, Q, K, SIZE>
where
Q: Ord + ?Sized,
R: RangeBounds<Q> + 'a,
K: 'a + Clone + Ord + Borrow<Q>,
{
type Item = &'a K;
fn next(&mut self) -> Option<Self::Item> {
self.0.next().map(|(k, ())| k)
}
}
impl<'a, R, Q, K, const SIZE: usize> DoubleEndedIterator for SetIter<'a, R, Q, K, SIZE>
where
Q: Ord + ?Sized,
R: RangeBounds<Q> + 'a,
K: 'a + Clone + Ord + Borrow<Q>,
{
fn next_back(&mut self) -> Option<Self::Item> {
self.0.next_back().map(|(k, ())| k)
}
}
impl<'a, K, const SIZE: usize> IntoIterator for &'a Set<K, SIZE>
where
K: 'a + Ord + Clone,
{
type Item = &'a K;
type IntoIter = SetIter<'a, RangeFull, K, K, SIZE>;
fn into_iter(self) -> Self::IntoIter {
SetIter(self.0.into_iter())
}
}
#[cfg(feature = "serde")]
impl<V, const SIZE: usize> Serialize for Set<V, SIZE>
where
V: Serialize + Clone + Ord,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(Some(self.len()))?;
for v in self {
seq.serialize_element(v)?
}
seq.end()
}
}
#[cfg(feature = "serde")]
struct SetVisitor<V: Clone + Ord, const SIZE: usize> {
marker: PhantomData<fn() -> Set<V, SIZE>>,
}
#[cfg(feature = "serde")]
impl<'a, V, const SIZE: usize> Visitor<'a> for SetVisitor<V, SIZE>
where
V: Deserialize<'a> + Clone + Ord,
{
type Value = Set<V, SIZE>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("expecting an immutable_chunkmap::Set")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'a>,
{
let mut t = Set::<V, SIZE>::new();
while let Some(v) = seq.next_element()? {
t.insert_cow(v);
}
Ok(t)
}
}
#[cfg(feature = "serde")]
impl<'a, V, const SIZE: usize> Deserialize<'a> for Set<V, SIZE>
where
V: Deserialize<'a> + Clone + Ord,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'a>,
{
deserializer.deserialize_seq(SetVisitor {
marker: PhantomData,
})
}
}
#[cfg(feature = "rayon")]
impl<'a, V, const SIZE: usize> IntoParallelIterator for &'a Set<V, SIZE>
where
V: 'a + Ord + Clone + Send + Sync,
{
type Item = &'a V;
type Iter = rayon::vec::IntoIter<&'a V>;
fn into_par_iter(self) -> Self::Iter {
self.into_iter().collect::<Vec<_>>().into_par_iter()
}
}
#[cfg(feature = "rayon")]
impl<V, const SIZE: usize> FromParallelIterator<V> for Set<V, SIZE>
where
V: Ord + Clone + Send + Sync,
{
fn from_par_iter<I>(i: I) -> Self
where
I: IntoParallelIterator<Item = V>,
{
i.into_par_iter()
.fold_with(Set::new(), |mut m, v| {
m.insert_cow(v);
m
})
.reduce_with(|m0, m1| m0.union(&m1))
.unwrap_or_else(Set::new)
}
}
impl<K, const SIZE: usize> Set<K, SIZE>
where
K: Ord + Clone,
{
/// Create a new empty set
pub fn new() -> Self {
Set(Tree::new())
}
/// Create a weak reference to this set
pub fn downgrade(&self) -> WeakSetRef<K, SIZE> {
WeakSetRef(self.0.downgrade())
}
/// Return the number of strong references to this set (see Arc)
pub fn strong_count(&self) -> usize {
self.0.strong_count()
}
/// Return the number of weak references to this set (see Arc)
pub fn weak_count(&self) -> usize {
self.0.weak_count()
}
/// This will insert many elements at once, and is
/// potentially a lot faster than inserting one by one,
/// especially if the data is sorted.
///
/// # Examples
/// ```
/// use self::immutable_chunkmap::set::SetM;
///
/// let mut v = vec![1, 10, -12, 44, 50];
/// v.sort_unstable();
///
/// let m = SetM::new().insert_many(v.iter().map(|k| *k));
///
/// for k in &v {
/// assert_eq!(m.contains(k), true)
/// }
/// ```
pub fn insert_many<E: IntoIterator<Item = K>>(&self, elts: E) -> Self {
let root = self.0.insert_many(elts.into_iter().map(|k| (k, ())));
Set(root)
}
/// Remove multiple elements in a single pass. Similar performance
/// to insert_many.
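///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let s = SetM::from_iter(0..10);
/// // remove the members 0..5 in one pass
/// let s = s.remove_many(0..5);
///
/// assert_eq!(s.len(), 5);
/// for i in 0..5 { assert!(!s.contains(&i)) }
/// for i in 5..10 { assert!(s.contains(&i)) }
/// ```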
pub fn remove_many<Q, E>(&self, elts: E) -> Self
where
Q: Ord,
K: Borrow<Q>,
E: IntoIterator<Item = Q>,
{
let root = self
.0
.update_many(elts.into_iter().map(|k| (k, ())), &mut |_, _, _| None);
Set(root)
}
/// This is admittedly a slightly odd method, however if you have a
/// bunch of borrowed forms of members of the set, and you want to
/// look at the real entries and possibly add/update/remove them,
/// then this method is for you.
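///
/// # Examples
/// A minimal sketch: keep the even members and drop the odd ones,
/// driving the update with owned keys (any borrowed form of K works):
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let s = SetM::from_iter(0..4);
/// // returning None removes the member, Some(k) keeps (or inserts) it
/// let s = s.update_many(0..4, |q, cur| if q % 2 == 0 { cur.cloned() } else { None });
///
/// assert!(s.contains(&0) && s.contains(&2));
/// assert!(!s.contains(&1) && !s.contains(&3));
/// ```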
pub fn update_many<Q, E, F>(&self, elts: E, mut f: F) -> Self
where
Q: Ord,
K: Borrow<Q>,
E: IntoIterator<Item = Q>,
F: FnMut(Q, Option<&K>) -> Option<K>,
{
let root =
self.0
.update_many(elts.into_iter().map(|k| (k, ())), &mut |q, (), cur| {
let cur = cur.map(|(k, ())| k);
f(q, cur).map(|k| (k, ()))
});
Set(root)
}
/// return a new set with k inserted into it, along with true if k
/// already existed in the old set, else false. If the element
/// already exists in the set, no memory will be allocated.
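///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use self::immutable_chunkmap::set::SetM;
///
/// let (s, present) = SetM::new().insert(0);
/// assert_eq!(present, false);
///
/// // inserting an existing element reports that it was already present
/// let (s, present) = s.insert(0);
/// assert_eq!(present, true);
/// assert!(s.contains(&0));
/// ```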
pub fn insert(&self, k: K) -> (Self, bool) {
if self.contains(&k) {
(self.clone(), true)
} else {
(Set(self.0.insert(k, ()).0), false)
}
}
/// insert `k` with copy on write semantics. If `self` is a unique
/// reference to the set, then k will be inserted in
/// place. Otherwise, only the parts of the set necessary to
/// insert `k` will be copied, and then the copies will be
/// mutated. `self` will share all the parts that weren't modified
/// with any previous clones.
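///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let mut s = SetM::from_iter(0..3);
/// let orig = s.clone();
///
/// // returns true only if the element was already present
/// assert_eq!(s.insert_cow(3), false);
/// assert_eq!(s.insert_cow(0), true);
///
/// assert!(s.contains(&3));
/// // pre-existing clones are not modified
/// assert!(!orig.contains(&3));
/// ```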
pub fn insert_cow(&mut self, k: K) -> bool {
self.0.insert_cow(k, ()).is_some()
}
/// return true if the set contains k, else false. Runs in
/// log(N) time and constant space, where N is the size of
/// the set.
pub fn contains<'a, Q>(&'a self, k: &Q) -> bool
where
Q: ?Sized + Ord,
K: Borrow<Q>,
{
self.0.get(k).is_some()
}
/// return a reference to the item in the set that is equal to the
/// given value, or None if no such value exists.
pub fn get<'a, Q>(&'a self, k: &Q) -> Option<&'a K>
where
Q: ?Sized + Ord,
K: Borrow<Q>,
{
self.0.get_key(k)
}
/// return a new set with k removed. Runs in log(N) time
/// and log(N) space, where N is the size of the set
pub fn remove<Q: Sized + Ord>(&self, k: &Q) -> (Self, bool)
where
K: Borrow<Q>,
{
let (t, prev) = self.0.remove(k);
(Set(t), prev.is_some())
}
/// remove `k` from the set in place with copy on write semantics
/// (see `insert_cow`). return true if `k` was in the set.
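///
/// # Examples
/// A minimal sketch of the expected behavior:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let mut s = SetM::from_iter(0..3);
/// let orig = s.clone();
///
/// assert_eq!(s.remove_cow(&1), true);
/// assert!(!s.contains(&1));
/// // pre-existing clones are not modified
/// assert!(orig.contains(&1));
/// ```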
pub fn remove_cow<Q: Sized + Ord>(&mut self, k: &Q) -> bool
where
K: Borrow<Q>,
{
self.0.remove_cow(k).is_some()
}
/// return the union of 2 sets. Runs in O(log(N) + M) time and
/// space, where N is the largest of the two sets, and M is the
/// number of chunks that intersect, which is roughly proportional
/// to the size of the intersection.
///
/// # Examples
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let s0 = SetM::from_iter(0..10);
/// let s1 = SetM::from_iter(5..15);
/// let s2 = s0.union(&s1);
/// for i in 0..15 {
/// assert!(s2.contains(&i));
/// }
/// ```
pub fn union(&self, other: &Set<K, SIZE>) -> Self {
Set(Tree::union(&self.0, &other.0, &mut |_, (), ()| Some(())))
}
/// return the intersection of 2 sets. Runs in O(log(N) + M) time
/// and space, where N is the smallest of the two sets, and M is
/// the number of intersecting chunks.
///
/// # Examples
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let s0 = SetM::from_iter(0..100);
/// let s1 = SetM::from_iter(20..50);
/// let s2 = s0.intersect(&s1);
///
/// assert!(s2.len() == 30);
/// for i in 0..100 {
/// if i < 20 || i >= 50 {
/// assert!(!s2.contains(&i));
/// } else {
/// assert!(s2.contains(&i));
/// }
/// }
/// ```
pub fn intersect(&self, other: &Set<K, SIZE>) -> Self {
Set(Tree::intersect(
&self.0,
&other.0,
&mut |_, (), ()| Some(()),
))
}
/// Return the difference of two sets. Runs in O(log(N) + M) time
/// and space, where N is the smallest of the two sets, and M is
/// the number of intersecting chunks.
///
/// # Examples
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let s0 = SetM::from_iter(0..100);
/// let s1 = SetM::from_iter(0..50);
/// let s2 = s0.diff(&s1);
///
/// assert!(s2.len() == 50);
/// for i in 0..50 {
/// assert!(!s2.contains(&i));
/// }
/// for i in 50..100 {
/// assert!(s2.contains(&i));
/// }
/// ```
pub fn diff(&self, other: &Set<K, SIZE>) -> Self
where
K: Debug,
{
Set(Tree::diff(&self.0, &other.0, &mut |_, (), ()| None))
}
/// get the number of elements in the set in O(1) time and space
pub fn len(&self) -> usize {
self.0.len()
}
/// return an iterator over the subset of elements in the
/// set that are within the specified range.
///
/// The returned iterator runs in O(log(N) + M) time, and
/// constant space. N is the number of elements in the
/// tree, and M is the number of elements you examine.
///
/// if lbound >= ubound the returned iterator will be empty
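///
/// # Examples
/// A minimal sketch of the expected behavior, assuming members are
/// yielded in ascending order as with the other iterators in this
/// crate:
/// ```
/// use core::iter::FromIterator;
/// use self::immutable_chunkmap::set::SetM;
///
/// let s = SetM::from_iter(0..10);
/// let v: Vec<i32> = s.range(3..6).copied().collect();
/// assert_eq!(v, vec![3, 4, 5]);
/// ```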
pub fn range<'a, Q, R>(&'a self, r: R) -> SetIter<'a, R, Q, K, SIZE>
where
Q: Ord + ?Sized + 'a,
K: 'a + Clone + Ord + Borrow<Q>,
R: RangeBounds<Q> + 'a,
{
SetIter(self.0.range(r))
}
}
impl<K, const SIZE: usize> Set<K, SIZE>
where
K: Ord + Clone + Debug,
{
#[allow(dead_code)]
pub(crate) fn invariant(&self) -> () {
self.0.invariant()
}
}

1066
vendor/immutable-chunkmap/src/tests.rs vendored Normal file

File diff suppressed because it is too large Load Diff