Vendor dependencies for 0.3.0 release

This commit is contained in:
2025-09-27 10:29:08 -05:00
parent 0c8d39d483
commit 82ab7f317b
26803 changed files with 16134934 additions and 0 deletions

1
vendor/combine/.cargo-checksum.json vendored Normal file
View File

@@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"2e1470226ebc52eda98ebd2051d9fd8b14eda6e77b78bef529f12f09548f6629","Cargo.lock":"a7a18d65585f2d008512efbd0f6e10b326cf1867928405060bb2cf368438982d","Cargo.toml":"7e755c2622b7a6ee59bd8c21c0e6c98df1f6f30c3f416c6632684a0117dec49a","LICENSE":"9bbc1b3dc4674a9ebb12c2a62f54fe3c08672539717f8d05828f684b6cc419a3","README.md":"426950cd2e2cf23e05859d7e227282c1fbee83e19e7e005138094bb66af49529","benches/buffers.rs":"879f6fb830e844f74f07b4815ea1173e984ab6f364fb8e4db2789cced28d7506","benches/data.json":"dfececbeb40e51033d9437fcb955c524fe31fd1b1734ad0e069b134d5bffd9dd","benches/http-requests.txt":"6c5d29174cc9d5c1064282e6e68d55ba5759ff9d9fac2e9b952822ba64fa6fa1","benches/http.rs":"5570b535a78e2817badcfd573876d846518ed49bdaf043c508bfe047b4bcae39","benches/json.rs":"969e5161f6e89f3f260b498310571533e0505dcb8aeac39f57f81186fbd88158","benches/mp4.rs":"3354bd947d5f66e568b3e26a4813dceedd2ccce1615a7aacf11298fad5c1640a","ci.sh":"b3a050d17575ba7a8e955dbc0272e9325d6d52dbbf88143b59d407c7f462c657","examples/async.rs":"f1c829e3f398ea3a50d74f9246e8ca1a0e6434d4c1346bb88a53f262827dc070","examples/date.rs":"7333b021cc7cc39ce7d3cd0bd603f0b36c64f1695c710be73c3f22206fe5bdd2","examples/ini.rs":"7793257a38aaff7968f0d3c5cb679d6d1d3fd384a2f2091733a74e3f2a8c2022","examples/number.rs":"46423aad5aac010bf779641cd53ca300faba4c920dffb27039b89c628d6f5ead","examples/readme.rs":"c156719ca4b1cca31841578a899d164561a4f151ab3be7e2fc47ed3f82a0db00","release.sh":"f4dc049059df5ba4fcf068330b7c81162fb7738f43c1473f4360444910fcf539","rustfmt.toml":"d8e7f616455a670ba75e3e94bf6f88f4c168c481664d12501820c7dfff5c3cc2","src/error.rs":"2aef1615bb4c83f06a2d23d273febe2b4d4d9a4f7b0747845168e30795a7b35e","src/future_ext.rs":"f6879b2adc93d8c5d1048bd0b4bdd6e8e4ceb7cb7fcdaa1f80c5688fb8a6f07c","src/lib.rs":"9dbda93f6ada241035f6f47dd487644e0cf3b729626cc70265edd5e9468c064a","src/parser/byte.rs":"39a6b430805f9745abe449b80e65e5a3cefbad7db7da7bf767f116203520bbcc","src/parser/char.rs":"36d002702baa2c66fc13a27b89ab0f05244a16
b2c8c2ddc68e8972d983d4f93b","src/parser/choice.rs":"2bf4b728cb9a008b77b694b4b4bc2785ac6fb3bf9856d4e50c58c335f2d60d62","src/parser/combinator.rs":"1b4f22777b01ae9314087444158325178a7f42c3fcd69c62322255440cbedeb9","src/parser/error.rs":"669917e75241fb23cf5b6b3d80e123cfa60d21adb229be58e5264aa3673ba81f","src/parser/function.rs":"32c49be9b8f66c41879e25744ed7b93da4740f87d27d14a187101878a203dd39","src/parser/mod.rs":"3d0fcb0a7435670870ad28fd58746ef0d4c11ecc5bb13165ada0f9b09df84864","src/parser/range.rs":"84691cd0ea409f42395c0c261076341b1e5c3a83b87f9495da9ab775525a3816","src/parser/regex.rs":"ca3e46d645c35deee8c995e99e958910aba84a78e14d1e57f35da44fddbb65b0","src/parser/repeat.rs":"dbea60a343481c879073a0a60a13802d7f6acccfb975b86207d07f4911985c57","src/parser/sequence.rs":"cbcbc2f9bb76ffa4a0486779f788ef46f95f3e056673d505dcdf8c59fa443c84","src/parser/token.rs":"2500adb7e8541ed83e5aa9777952920c0169923acf06e32ef48069031697dbbc","src/stream/buf_reader.rs":"c3b89f9658f6a83f2050ae3ae87d7e6821f6016d5a747f0f51f4a35d70ab007b","src/stream/buffered.rs":"75df0dcd053a96b893fedbc393d11fa1c4a56ead450b28d847e443a04c688db2","src/stream/decoder.rs":"b23859913cf7e704b0e1f1477260f2c92be7163fba0b038d42d4245fc0011d46","src/stream/easy.rs":"2097cba05a8d1bf72c540649ef685847fc7ab9a57661fb9b98bad6baa6af1ff4","src/stream/mod.rs":"b7a3a887ea27143818986772b3957f9cc578a0b35dc1dcf5592d206296ea4b36","src/stream/position.rs":"08025dc42db27437cf69ca931178cd2949402913b501f4387fcabc1a3e5c03f9","src/stream/read.rs":"b7b732ac41bf6692334746610d9912bc5c8354dd5136e4e27cc78a6224f6dfde","src/stream/span.rs":"e229ca993efc243888c9c7ddbad60273c16a5b7b1212624c34f8f0186f2c97b7","src/stream/state.rs":"21107661c102b39653a2b90728c23087d30d2b105bd4ac518b6faaea1c098794","tests/async.rs":"6d64fcb32850f173470f89fea95503aa5bf2f6f9d0ef231b32ae4cc7afbd5dd2","tests/buffered_stream.rs":"26fdcec1c13cf0504dadbd37044ebc4dcbba34485fbfda0d262233517e71a9c0","tests/parser.rs":"73d91daca1a5a43c48d41a875bc3e4e9035c8981e26669f03db6487241da4c6e"
,"tests/parser_macro.rs":"4cad91d583cf60f1e917c1f79aeb3701d5a746928c518ec6f2d61c3a3becdea4","tests/support/mod.rs":"9e375de8c6b66d2a40b9f14443bb8c153691afa6e5d8d3061aefa709187ed3d8"},"package":"ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"}

754
vendor/combine/CHANGELOG.md vendored Normal file
View File

@@ -0,0 +1,754 @@
<a name="v4.6.7"></a>
### v4.6.7 (2024-04-10)
* perf: avoid initializing huge buffers in the stream decoder
<a name="v4.6.6"></a>
### v4.6.6 (2022-08-09)
* memchr: use non deprecated feature `std` instead of `use_std`
* refactor: Extract less-generic code from sequence's add_errors (-5%)
<a name="v4.6.4"></a>
### v4.6.4 (2022-04-25)
<a name="v4.6.3"></a>
### v4.6.3 (2022-01-12)
#### Bug Fixes
* **easy:** Only back-quote parser tokens ([08962d1d](https://github.com/Marwes/combine/commit/08962d1d4287c03edb84bd73a9eb09a42f294bc8), closes [#334](https://github.com/Marwes/combine/issues/334))
<a name="v4.6.3"></a>
### v4.6.3 (2022-01-12)
#### Bug Fixes
* **easy:** Only back-quote parser tokens ([08962d1d](https://github.com/Marwes/combine/commit/08962d1d4287c03edb84bd73a9eb09a42f294bc8), closes [#334](https://github.com/Marwes/combine/issues/334))
<a name="v4.6.2"></a>
### v4.6.2 (2021-10-28)
#### Performance
* Handle restarts better in take_until_byte{,s,2,3} ([03b19498](https://github.com/Marwes/combine/commit/03b19498189c873cdf8195a207a9f5a747ca40b7), closes [#327](https://github.com/Marwes/combine/issues/327))
#### Bug Fixes
* warning ([08753d3e](https://github.com/Marwes/combine/commit/08753d3ebf072c0765e99bd6f440b52ef93cab40))
<a name="v4.6.1"></a>
### v4.6.1 (2021-08-25)
#### Performance
* Avoid a saturating add in slice_uncons_while ([7f330b0c](https://github.com/Marwes/combine/commit/7f330b0cacd61131df88c919074ffa8136100299))
* Avoid a saturating add in slice_uncons_while ([ad4180dd](https://github.com/Marwes/combine/commit/ad4180dd7d3530d47502795ead21e13b7816aed7))
<a name="v4.6.0"></a>
## v4.6.0 (2021-06-16)
#### Features
* Add decode_tokio ([aa20bf64](https://github.com/Marwes/combine/commit/aa20bf641bc5deed7e521de289f2b0963034f750))
<a name="v4.5.2"></a>
### v4.5.2 (2021-01-07)
<a name="v4.5.1"></a>
### v4.5.1 (2020-12-27)
#### Bug Fixes
* Correct the tokio-02 feature ([466a50d3](https://github.com/Marwes/combine/commit/466a50d3533118cca0bafab48451fd39f92a8233))
<a name="v4.5.0"></a>
## v4.5.0 (2020-12-25)
#### Features
* Remove pin-project-lite from the 'std' feature ([32ef87b0](https://github.com/Marwes/combine/commit/32ef87b08a643c9814cb9eec6d1f3adfe220c690))
* Add async decoding for tokio version 1 ([8e91f57d](https://github.com/Marwes/combine/commit/8e91f57d1ddaefafcaf244df7a7eea2096c6d6aa))
<a name="v4.4.0"></a>
## v4.4.0 (2020-11-18)
#### Features
* Add support for decoding using tokio_03 ([ce1612ff](https://github.com/Marwes/combine/commit/ce1612ffa0a15547bc967474b511ec35eaaf743f))
* Add a length_prefix combinator ([9e343b2a](https://github.com/Marwes/combine/commit/9e343b2a0067ea70b049369ea1023c364c14b19d), closes [#297](https://github.com/Marwes/combine/issues/297))
* Add a spanned combinator ([1a70f3f4](https://github.com/Marwes/combine/commit/1a70f3f4219eac71880cbb376e62d3f01c22d981))
* Add a spanned combinator ([9a8fead8](https://github.com/Marwes/combine/commit/9a8fead85b3eb0d7ee21f80d076af9f59aa408d1))
* Allow conversion between errors with different token,range,position types ([65fe0af2](https://github.com/Marwes/combine/commit/65fe0af27fa560f90da2723160efd2d1782bea4f), breaks [#](https://github.com/Marwes/combine/issues/))
#### Breaking Changes
* Allow conversion between errors with different token,range,position types ([65fe0af2](https://github.com/Marwes/combine/commit/65fe0af27fa560f90da2723160efd2d1782bea4f), breaks [#](https://github.com/Marwes/combine/issues/))
<a name="v4.3.2"></a>
### v4.3.2 (2020-09-08)
<a name="v4.3.1"></a>
### v4.3.1 (2020-08-26)
#### Bug Fixes
* Make the decode macros work for redis ([82b908d8](https://github.com/Marwes/combine/commit/82b908d80ac21aeab27b169a1f1857a68a6bb11c))
<a name="v4.3.0"></a>
## v4.3.0 (2020-07-10)
#### Features
* Specialize decoding on BufReader ([9559e114](https://github.com/Marwes/combine/commit/9559e114658b60a59d570ffd78f4d7ecb597b814))
* Add a BufReader abstraction for Decoder ([d65a23e8](https://github.com/Marwes/combine/commit/d65a23e81f482c6a5482e53d63c17fcbcf67d623))
* Allow tuple structs and functions in struct_parser! ([48a16f6b](https://github.com/Marwes/combine/commit/48a16f6bf84939da9eacb1f82b4cff6ca7f324e0))
<a name="v4.2.1"></a>
### v4.2.1 (2020-05-20)
#### Performance
* Use size_hint in iterate ([b2649e3b](https://github.com/Marwes/combine/commit/b2649e3b467fc84ef48a91d56ec1fcb40291978e))
* Pre-allocate collections for count* parsers ([5f37857c](https://github.com/Marwes/combine/commit/5f37857c988b81c2b0613d6f2ab14576c794ae99))
<a name="v4.2.0"></a>
## v4.2.0 (2020-05-17)
#### Features
* Add any_send_sync_partial_state ([7e90807a](https://github.com/Marwes/combine/commit/7e90807a0949411b6aaf24a677e6530a530a1478))
<a name="v4.1.0"></a>
## v4.1.0 (2020-04-22)
<a name="4.0.1"></a>
### 4.0.1 (2020-01-23)
#### Breaking Changes
* Make the decode macros take read by parameter ([aa813fb1](https://github.com/Marwes/combine/commit/aa813fb1b486ecdc5258bf2c89e0b18a8f4fc876), breaks [#](https://github.com/Marwes/combine/issues/))
#### Features
* Make the decode macros take read by parameter ([aa813fb1](https://github.com/Marwes/combine/commit/aa813fb1b486ecdc5258bf2c89e0b18a8f4fc876), breaks [#](https://github.com/Marwes/combine/issues/))
#### Bug Fixes
* Don't block decoding if there are more data in the buffer ([6659f993](https://github.com/Marwes/combine/commit/6659f993784876bdc5d6f6145aaffe2844ada760))
* Don't try to read into the entire buffer for std decoding ([d613dc93](https://github.com/Marwes/combine/commit/d613dc937de6e0b745f8cbcca974e3cfcb3db723))
<a name="4.0.0"></a>
## 4.0.0 (2020-01-16)
#### Features
* Relax the decode*! macros to no longer need BufRead ([bc5b7794](https://github.com/Marwes/combine/commit/bc5b779491c66a18289bc4b237d281bd391b1d69))
* Add decode_futures_03_buf_read ([f403ecda](https://github.com/Marwes/combine/commit/f403ecda68e5d7c5d5cf89d8feb26aa1b715609e))
* Add the produce parser ([ac15b87c](https://github.com/Marwes/combine/commit/ac15b87c4eb23065920b3bb7ad8d590f29937895))
<a name="4.0.0-beta.2"></a>
## 4.0.0-beta.2 (2019-12-19)
#### Features
* Increase tuple parsers to 20 elements ([45781fea](https://github.com/Marwes/combine/commit/45781feac3cd2da252fcc999aa69c9a207af5f6a))
* Remove deprecated re-exports ([b4e23207](https://github.com/Marwes/combine/commit/b4e23207f9c3e47111c69865389853d89b8ce12d), breaks [#](https://github.com/Marwes/combine/issues/))
* Add decode_tokio_buf_read! to parse tokio::io::BufRead ([4d38f7c2](https://github.com/Marwes/combine/commit/4d38f7c25cb0cde10cdc869955c0a98079b5fa08))
* Add `decode_buf_read!` to parse `BufRead` without ([dbe23ce9](https://github.com/Marwes/combine/commit/dbe23ce90803d409c591b02aaeb7005d0f58622a))
* Add a macro to incrementally decode std::io::BufRead ([6e2f1121](https://github.com/Marwes/combine/commit/6e2f1121203c54b1623f9f5a8b35907867c83874))
* Rename Consumed to Commit and Empty to Peek ([129046e3](https://github.com/Marwes/combine/commit/129046e3a555318c3e60f658988f1be8fe83a2fd), breaks [#](https://github.com/Marwes/combine/issues/))
* Rename Consumed{Ok,Err} to Commit{Ok,Err} and Empty{Ok,Err} to Peek{Ok,Err} ([0ac1fd4f](https://github.com/Marwes/combine/commit/0ac1fd4fee89bd540b38b0d3224e0dcf5260ab77))
* Add a dedicated error for ReadStream ([37bbd843](https://github.com/Marwes/combine/commit/37bbd84383ecaa632df537322426407be7712748))
#### Bug Fixes
* Handle partial parsing in sep_end_by ([281e0d30](https://github.com/Marwes/combine/commit/281e0d30276cd8d2b730680fb24117bb1b72b198))
#### Breaking Changes
* Remove deprecated re-exports ([b4e23207](https://github.com/Marwes/combine/commit/b4e23207f9c3e47111c69865389853d89b8ce12d), breaks [#](https://github.com/Marwes/combine/issues/))
* Rename Consumed to Commit and Empty to Peek ([129046e3](https://github.com/Marwes/combine/commit/129046e3a555318c3e60f658988f1be8fe83a2fd), breaks [#](https://github.com/Marwes/combine/issues/))
<a name="4.0.0-beta.1"></a>
## 4.0.0-beta.1 (2019-10-08)
#### Bug Fixes
* Handle partial parsing in the num parsers ([47764c7f](https://github.com/Marwes/combine/commit/47764c7feb8becefd1d4c376fc11492ed3f3cd6a))
* Don't bind the input lifetime to the parser with expected/message ([618c69e0](https://github.com/Marwes/combine/commit/618c69e09afc383dadc9af305394ca82948801c3))
#### Breaking Changes
* Merge FullRangeStream into RangeStreamOnce ([c160a971](https://github.com/Marwes/combine/commit/c160a971b47c29a0c8d37fbca6f77cd4a6c85831), breaks [#](https://github.com/Marwes/combine/issues/))
* Rename `StreamOnce::Item` to `Token` ([74a0bbd3](https://github.com/Marwes/combine/commit/74a0bbd363214047236ae88e76bd7e7a7cdc265f), breaks [#](https://github.com/Marwes/combine/issues/), [#](https://github.com/Marwes/combine/issues/))
* Minimum rust version is now 1.32 ([7b77508c](https://github.com/Marwes/combine/commit/7b77508c05f2075e7795b6026e419a2a57666a1c), breaks [#](https://github.com/Marwes/combine/issues/))
* Only require `&mut I` in decode ([e154dbc2](https://github.com/Marwes/combine/commit/e154dbc2f2ca3288e6733f29c85f4b1efcd689c7), breaks [#](https://github.com/Marwes/combine/issues/))
* Generalize factory to take the Input ([19b1a73e](https://github.com/Marwes/combine/commit/19b1a73ed3516d14b392aefeba0363d01937be22), breaks [#](https://github.com/Marwes/combine/issues/))
#### Features
* Merge FullRangeStream into RangeStreamOnce ([c160a971](https://github.com/Marwes/combine/commit/c160a971b47c29a0c8d37fbca6f77cd4a6c85831), breaks [#](https://github.com/Marwes/combine/issues/))
* Rename `StreamOnce::Item` to `Token` ([74a0bbd3](https://github.com/Marwes/combine/commit/74a0bbd363214047236ae88e76bd7e7a7cdc265f), breaks [#](https://github.com/Marwes/combine/issues/), [#](https://github.com/Marwes/combine/issues/))
* Allow fmt::Display be used to specify errors in combinators ([82796d4a](https://github.com/Marwes/combine/commit/82796d4ae5ea23290920ae4e1586b20535285d49), closes [#255](https://github.com/Marwes/combine/issues/255))
* Minimum rust version is now 1.32 ([7b77508c](https://github.com/Marwes/combine/commit/7b77508c05f2075e7795b6026e419a2a57666a1c), breaks [#](https://github.com/Marwes/combine/issues/))
* Only require `&mut I` in decode ([e154dbc2](https://github.com/Marwes/combine/commit/e154dbc2f2ca3288e6733f29c85f4b1efcd689c7), breaks [#](https://github.com/Marwes/combine/issues/))
* Add RepeatUntil ([7f4a310f](https://github.com/Marwes/combine/commit/7f4a310f8edaebe7d3bd450aca7a2823fcf2e5d6))
* Generalize factory to take the Input ([19b1a73e](https://github.com/Marwes/combine/commit/19b1a73ed3516d14b392aefeba0363d01937be22), breaks [#](https://github.com/Marwes/combine/issues/))
* Add the iterate parser ([342a45b4](https://github.com/Marwes/combine/commit/342a45b484240ace2e313138b2818c95cbec3427))
* Add dispatch! ([0740ce0f](https://github.com/Marwes/combine/commit/0740ce0fd2ade2bd19981261dfeb89cae1f63120))
* impl Stream* for &mut T ([51e7e2b7](https://github.com/Marwes/combine/commit/51e7e2b736f3b3ab8d9ec9877e2afa8fc31f5207))
* Add `From<S>` for `easy::Stream<S>` ([8f695cc6](https://github.com/Marwes/combine/commit/8f695cc62268a855c3c3847661f6ffe8308b745e))
* Allow ReadStream parsers to use &[u8] in errors ([6b62a857](https://github.com/Marwes/combine/commit/6b62a857f441ab930e2f595aff3e87f992c769b8), closes [#249](https://github.com/Marwes/combine/issues/249))
<a name="4.0.0-alpha.2"></a>
## 4.0.0-alpha.2 (2019-06-17)
#### Features
* impl Stream* for &mut T ([51e7e2b7](https://github.com/Marwes/combine/commit/51e7e2b736f3b3ab8d9ec9877e2afa8fc31f5207))
* Add `From<S>` for `easy::Stream<S>` ([8f695cc6](https://github.com/Marwes/combine/commit/8f695cc62268a855c3c3847661f6ffe8308b745e))
* Allow ReadStream parsers to use `&[u8]` in errors ([6b62a857](https://github.com/Marwes/combine/commit/6b62a857f441ab930e2f595aff3e87f992c769b8), closes [#249](https://github.com/Marwes/combine/issues/249))
<a name="4.0.0-alpha.1"></a>
## 4.0.0-alpha.1 (2019-05-07)
#### Breaking Changes
* Drop support for regex 0.2 ([bf6aeb06](https://github.com/Marwes/combine/commit/bf6aeb06494abe2f1890c5bf90db86ac01ec9772), closes [#247](https://github.com/Marwes/combine/issues/247), breaks [#](https://github.com/Marwes/combine/issues/))
* Rename tokens2 to tokens and tokens to tokens_cmp ([3dadbb4f](https://github.com/Marwes/combine/commit/3dadbb4f2adb0447c883ea56bdbfcfd53c58d384), closes [#166](https://github.com/Marwes/combine/issues/166), breaks [#](https://github.com/Marwes/combine/issues/))
* Remove the std::result::Result returning parse functions ([6ec094ef](https://github.com/Marwes/combine/commit/6ec094efd2eecefeb2281fb99687143e7a5580e8), closes [#244](https://github.com/Marwes/combine/issues/244), breaks [#](https://github.com/Marwes/combine/issues/))
* Make PointerOffset easier to understand and use ([5f6d65b3](https://github.com/Marwes/combine/commit/5f6d65b30679d39c4c8a41ef0877bca8dc199095), closes [#238](https://github.com/Marwes/combine/issues/238), breaks [#](https://github.com/Marwes/combine/issues/))
* Allow reset to return errors ([3055c810](https://github.com/Marwes/combine/commit/3055c810fd4904d2eba3f51ea232ef0232f8fbe7), closes [#231](https://github.com/Marwes/combine/issues/231), breaks [#](https://github.com/Marwes/combine/issues/))
* Remove the PartialEq bound from Item and Range ([24e1087c](https://github.com/Marwes/combine/commit/24e1087cbff4938d48ae3e5947e2eb0dcbb8cc87), closes [#219](https://github.com/Marwes/combine/issues/219), breaks [#](https://github.com/Marwes/combine/issues/))
#### Features
* Drop support for regex 0.2 ([bf6aeb06](https://github.com/Marwes/combine/commit/bf6aeb06494abe2f1890c5bf90db86ac01ec9772), closes [#247](https://github.com/Marwes/combine/issues/247), breaks [#](https://github.com/Marwes/combine/issues/))
* Rename tokens2 to tokens and tokens to tokens_cmp ([3dadbb4f](https://github.com/Marwes/combine/commit/3dadbb4f2adb0447c883ea56bdbfcfd53c58d384), closes [#166](https://github.com/Marwes/combine/issues/166), breaks [#](https://github.com/Marwes/combine/issues/))
* Remove the std::result::Result returning parse functions ([6ec094ef](https://github.com/Marwes/combine/commit/6ec094efd2eecefeb2281fb99687143e7a5580e8), closes [#244](https://github.com/Marwes/combine/issues/244), breaks [#](https://github.com/Marwes/combine/issues/))
* Allow reset to return errors ([3055c810](https://github.com/Marwes/combine/commit/3055c810fd4904d2eba3f51ea232ef0232f8fbe7), closes [#231](https://github.com/Marwes/combine/issues/231), breaks [#](https://github.com/Marwes/combine/issues/))
* Remove the PartialEq bound from Item and Range ([24e1087c](https://github.com/Marwes/combine/commit/24e1087cbff4938d48ae3e5947e2eb0dcbb8cc87), closes [#219](https://github.com/Marwes/combine/issues/219), breaks [#](https://github.com/Marwes/combine/issues/))
#### Bug Fixes
* Don't require macro_use when using parser! in rust 2018 ([72e4c70f](https://github.com/Marwes/combine/commit/72e4c70fda2f283e1feadfd926ed9bb0d384cb59), closes [#241](https://github.com/Marwes/combine/issues/241))
* Make PointerOffset easier to understand and use ([5f6d65b3](https://github.com/Marwes/combine/commit/5f6d65b30679d39c4c8a41ef0877bca8dc199095), closes [#238](https://github.com/Marwes/combine/issues/238), breaks [#](https://github.com/Marwes/combine/issues/))
<a name="3.8.1"></a>
### 3.8.1 (2019-03-13)
#### Bug Fixes
* Handle partial parsing in `take_fn/take_until*` ([bae1e3bb](https://github.com/Marwes/combine/commit/bae1e3bbe476fb3fe7c4ff6cc375e50f3d9cfd45))
<a name="3.8.0"></a>
## 3.8.0 (2019-03-12)
#### Features
* Add take_fn and take_until_bytes ([5f560780](https://github.com/Marwes/combine/commit/5f5607806f8e133485c990419a03de8c7531fa14))
<a name="3.6.7"></a>
### 3.6.7 (2019-02-13)
#### Bug Fixes
* Forward is_partial in all Stream adaptors ([121b3987](https://github.com/Marwes/combine/commit/121b39879543c074924185dfe17d6b4f434ce413))
<a name="3.6.6"></a>
### 3.6.6 (2019-01-12)
* Fix two inconsistencies regarding EOI [#227](https://github.com/Marwes/combine/pull/227)
<a name="3.6.5"></a>
### 3.6.5 (2019-01-09)
#### Bug Fixes
* not_followed_by should fail on empty successful parses ([aa17e7d2](https://github.com/Marwes/combine/commit/aa17e7d25532eebdb6c447459c8550a4fb6bbe93))
* Propagate the PartialState through parser! uses ([ae888244](https://github.com/Marwes/combine/commit/ae8882449ee935f7ffb0dd3ef7f0e2d8d6932409), closes [#223](https://github.com/Marwes/combine/issues/223))
<a name="3.6.3"></a>
### 3.6.3 (2018-11-16)
<a name="3.6.2"></a>
### 3.6.2 (2018-11-06)
<a name="3.6.1"></a>
### 3.6.1 (2018-10-13)
#### Bug Fixes
* Allow clippy to run on stable rust ([6cb00803](https://github.com/Marwes/combine/commit/6cb00803d0135a3849a7f0b35dcf635764b32c06))
<a name="3.6.0"></a>
## 3.6.0 (2018-10-07)
#### Features
* Deprecates `try` in favor of `attempt` ([a9c79321](https://github.com/Marwes/combine/commit/a9c79321b28ea5332d30429936bfca0c034105ca))
* Adds `attempt`, a 2018-compatible alias for `try` ([18edaec4](https://github.com/Marwes/combine/commit/18edaec46f67a3803b96bac3762b6f809d0c2724))
<a name="3.5.3"></a>
### 3.5.3 (2018-10-06)
#### Features
* Allow regex parsers to use regex-1.0 ([77fe362c](https://github.com/Marwes/combine/commit/77fe362c56efa6ce20e3074388aeda33cfe91c4b))
* pub use unexpected_any ([8b2ca559](https://github.com/Marwes/combine/commit/8b2ca55971e01e8a87ec20ec7b9fe8476023117f))
#### Bug Fixes
* use unexpected_any inside `then` examples ([f2018db5](https://github.com/Marwes/combine/commit/f2018db5cf5487d02488d017c23f6993e6ed6f82))
<a name="3.5.2"></a>
### 3.5.2 (2018-09-12)
#### Bug Fixes
* Don't report previous errors from expected ([0048c5ef](https://github.com/Marwes/combine/commit/0048c5ef4577592df94e0c5296bee3a128117211))
<a name="3.5.1"></a>
### 3.5.1 (2018-08-13)
#### Bug Fixes
* Add all errors of nested choice parsers ([f2b8fbbf](https://github.com/Marwes/combine/commit/f2b8fbbfe300e2b219eb72116856c217fcec8b2b))
<a name="3.5.0"></a>
## 3.5.0 (2018-08-13)
#### Features
* Add opaque! as convenience over the opaque parser ([9855aa1f](https://github.com/Marwes/combine/commit/9855aa1f39b62b09addb1c7a25035616a8cbef42))
* Add the factory parser ([fdd38d46](https://github.com/Marwes/combine/commit/fdd38d46b5dbdeece7f6f3a99b12e470a08e9a92))
<a name="3.4.0"></a>
## 3.4.0 (2018-08-04)
#### Features
* Add the silent combinator ([b9bc28d3](https://github.com/Marwes/combine/commit/b9bc28d32b80644ba5a7fdc6969e7e8e734ee4de))
* Add the `opaque` parser ([1a1123f5](https://github.com/Marwes/combine/commit/1a1123f5b6970c88d99643d87d75c8fb63117607))
* Add tokens2 as a simpler version of tokens ([072a8c13](https://github.com/Marwes/combine/commit/072a8c13f90c640d2a98910276bc96d5aa27fae9))
* Add the from_str combinator ([908f9ebd](https://github.com/Marwes/combine/commit/908f9ebdc3593eda67e8eba8f5b467962076964e))
#### Bug Fixes
* Make (many1(p), end) report more expected messages ([9e26d38e](https://github.com/Marwes/combine/commit/9e26d38e06c8bac9dbaa547893b8f2aea01f047d))
* Report more errors in the middle of sequence parsers ([f9e404f6](https://github.com/Marwes/combine/commit/f9e404f6840eb4d42016122095416f76bee5abf3))
* Report more expected information from nested sequence parsers ([9aa1db92](https://github.com/Marwes/combine/commit/9aa1db92dcd17a35d5775bf78b6642c4e8c89d1d))
* Use the message variant in from_str to work on no_std ([edf5ff60](https://github.com/Marwes/combine/commit/edf5ff6002d6b6f2f8bd23692dce5084e9fc01dd))
* Remove redundant Any bound on easy::Errors ([16601046](https://github.com/Marwes/combine/commit/16601046f3c2aa8b4ad75ba2390486f7b5306219), closes [#177](https://github.com/Marwes/combine/issues/177))
<a name="3.3.6"></a>
### 3.3.6
* Don't forget the state in any_send_partial_state ([4e2eb928](https://github.com/Marwes/combine/commit/4e2eb928e6059a4182b2717bf2bfb7cccdc60127))
<a name="3.3.5"></a>
### 3.3.5 (2018-06-30)
#### Bug Fixes
* Propagate the ParseMode in range::recognize ([c330a737](https://github.com/Marwes/combine/commit/c330a73746f6adfa22c6b13b15d796d48f589614))
<a name="3.3.4"></a>
### 3.3.4 (2018-06-30)
* fix: Forward the partial mode through the parser! macro correctly
<a name="3.3.3"></a>
### 3.3.3 (2018-06-29)
#### Bug Fixes
* Parse from the start in sequence parsers in first mode
<a name="3.3.1"></a>
### 3.3.1 (2018-06-01)
#### Bug Fixes
* support resuming streams in TakeUntilRange ([b54ff061](https://github.com/Marwes/combine/commit/b54ff0619663aaf7d3c33185b1a4b7ec73cc1f61))
* reset stream on error in take_until_range ([27449f21](https://github.com/Marwes/combine/commit/27449f2131ecb3d6ef956e2e67b588cae58a9810))
* support multi-byte `Item`s in take_until_range ([4a690d65](https://github.com/Marwes/combine/commit/4a690d65160ea6e1866fd6dbe8865acae4070c3a))
<a name="v3.3.0"></a>
## v3.3.0 (2018-05-19)
#### Features
* Add the escaped parser ([0db58a20](https://github.com/Marwes/combine/commit/0db58a20f227d923ffcd3451fbb07ace87dba07c))
* Add the lazy parser combinator ([496ac836](https://github.com/Marwes/combine/commit/496ac83628ba33ee9a886f989f8749388f918652))
#### Breaking Changes
* Remove redundant state comparison in Iter ([9d434c3f](https://github.com/Marwes/combine/commit/9d434c3ff89480aeb9c5552e439b465adc28e31d), breaks [#](https://github.com/Marwes/combine/issues/))
#### Performance
* Specialize uncons_while1 on all streams ([c995ad61](https://github.com/Marwes/combine/commit/c995ad6125996b594b8bf45da078fdebeb41b86a))
* Unroll the loop for <[T]>::uncons_while ([f593e85d](https://github.com/Marwes/combine/commit/f593e85d4d0ecb9c7c91b769685864fda3291646))
* Inline from/into for results ([fff248e4](https://github.com/Marwes/combine/commit/fff248e494361e04cedbf849df959193560416ff))
* Unroll take_while1 ([279a4526](https://github.com/Marwes/combine/commit/279a4526494e1cd7e90252c3bc1cfc8f8c35ebcb))
* Remove redundant state comparison in Iter ([9d434c3f](https://github.com/Marwes/combine/commit/9d434c3ff89480aeb9c5552e439b465adc28e31d), breaks [#](https://github.com/Marwes/combine/issues/))
* Add uncons_while1 as a default method on RangeStream ([5d154f15](https://github.com/Marwes/combine/commit/5d154f15a13091c26246627486b309cbdef06d14))
<a name="v3.2.0"></a>
## v3.2.0 (2018-04-24)
#### Features
* Add any_send_partial_state ([d4153d31](https://github.com/Marwes/combine/commit/d4153d31b074e950f4752f29c8ed188102534e91))
<a name="v3.1.0"></a>
## v3.1.0 (2018-03-26)
#### Features
* Allow the `num` parsers to be used without RangeStream ([b1cb0668](https://github.com/Marwes/combine/commit/b1cb0668cc13df3a2f1b6cc35f221089d0279579))
* Add the take_until parser ([7b03b596](https://github.com/Marwes/combine/commit/7b03b596a58e4cdbe84d6008e277f0cc57394fae))
* Allow try parsers to be used with partial parsing ([cb2da7ad](https://github.com/Marwes/combine/commit/cb2da7ad74758bc89b17bedd90f2f53ea9f83e7d))
<a name="v3.0.0"></a>
## v3.0.0 (2018-03-18)
3.0.0 is the final stabilization of all the changes made in the 3.x-alpha/beta releases. You can read more about these changes
at https://marwes.github.io/2018/02/08/combine-3.html and https://www.reddit.com/r/rust/comments/6s792a/combine_250_and_300alpha1/
#### Features
* Let single element tuples work as sequence and choice parsers ([81e34d2d](https://github.com/Marwes/combine/commit/81e34d2d8d823b9962f4036e7576353252f211b9))
<a name="3.0.0-beta.1"></a>
## v3.0.0-beta.1 (2018-02-02)
#### Features
* Encode parsers as resumable state machines [342fc47](https://github.com/Marwes/combine/commit/342fc4770ee9dc62df51683ccca2e612d6e1ea33)
* Add the unexpected_any parser ([979e0d7e](https://github.com/Marwes/combine/commit/979e0d7e4ac63c41712352bc87b51001aa067879), closes [#126](https://github.com/Marwes/combine/issues/126))
* Don't have the error type be generic for uncons* ([df3e84f0](https://github.com/Marwes/combine/commit/df3e84f0275352fba44672b2701d452f6bb55596))
* Add a alias which helps to refer to the StreamError type ([95eb70cb](https://github.com/Marwes/combine/commit/95eb70cb025aec89925e3f1992a6d1b266328eb2))
* Add memchr optimized take_until_byte parsers ([30cc7d1d](https://github.com/Marwes/combine/commit/30cc7d1d39e754d2c2e8491eb2123dacec5d30f3))
* Add the `then_partial` parser ([5d402f6b](https://github.com/Marwes/combine/commit/5d402f6be0c1be4d69159d4df1d1d4589f5cd66e))
* Don't require `Clone` for `Stream` ([3fc0b540](https://github.com/Marwes/combine/commit/3fc0b540521aff959ce62628df1ac7554a9df861), breaks [#](https://github.com/Marwes/combine/issues/))
#### Breaking Changes
* Don't require `Clone` for `Stream` ([3fc0b540](https://github.com/Marwes/combine/commit/3fc0b540521aff959ce62628df1ac7554a9df861), breaks [#](https://github.com/Marwes/combine/issues/))
#### Bug Fixes
* Allow `parser!` to be used within functions ([916bb824](https://github.com/Marwes/combine/commit/916bb824741f054ed2f0686dcce316d0a770d9db))
* Allow multiple unnamed public parsers to be in the same scope ([c04e2247](https://github.com/Marwes/combine/commit/c04e2247ab04f098ef0eae887c7a7739d99212fc))
* Return the correct distance for slices with larger than 1 byte items ([6b3c661a](https://github.com/Marwes/combine/commit/6b3c661af23fd221d2b28c37ed207d37409c2491))
* Don't add extra expected errors when erroring in the middle of a sequence ([44eac24d](https://github.com/Marwes/combine/commit/44eac24d46f265a1b3a94d5587d4f200ebebc18f))
#### Performance
* Avoid cloning input in satisfy ([9aeaefa9](https://github.com/Marwes/combine/commit/9aeaefa95f97bd8b9d186923e16cd8def98d8e81))
<a name="3.0.0-alpha.4"></a>
## v3.0.0-alpha.4 (2017-10-11)
#### Breaking Changes
* Rename EasyStream -> Stream, ParsingError => ParseError ... ([d2f4ab14](https://github.com/Marwes/combine/commit/d2f4ab1471cc0616a46bfe965a611d465434d19a), breaks [#](https://github.com/Marwes/combine/issues/))
* Make the RangeStreamOnce function generic over the returned error ([818d8629](https://github.com/Marwes/combine/commit/818d8629116fec8eef64494a938f0340c04d6ad6), breaks [#](https://github.com/Marwes/combine/issues/))
* Re-export the type generated by parser! if it is public ([61469f0a](https://github.com/Marwes/combine/commit/61469f0a2db899a1144d0335dd47b9bb8d3105f2), breaks [#](https://github.com/Marwes/combine/issues/))
#### Performance
* Add inline annotations on error traits ([c8b495b4](https://github.com/Marwes/combine/commit/c8b495b41a21cd71b62782f62bbae77f13f92fb7))
#### Features
* Rename EasyStream -> Stream, ParsingError => ParseError ... ([d2f4ab14](https://github.com/Marwes/combine/commit/d2f4ab1471cc0616a46bfe965a611d465434d19a), breaks [#](https://github.com/Marwes/combine/issues/))
* Make the RangeStreamOnce function generic over the returned error ([818d8629](https://github.com/Marwes/combine/commit/818d8629116fec8eef64494a938f0340c04d6ad6), breaks [#](https://github.com/Marwes/combine/issues/))
* Allow combine to be used in no_std environments ([9fd310ac](https://github.com/Marwes/combine/commit/9fd310ac6b795f8f4152892a698dcf29d9c72b7b))
* Re-export the type generated by parser! if it is public ([61469f0a](https://github.com/Marwes/combine/commit/61469f0a2db899a1144d0335dd47b9bb8d3105f2), breaks [#](https://github.com/Marwes/combine/issues/))
<a name="v3.0.0-alpha.3"></a>
## v3.0.0-alpha.3 (2017-08-20)
* Implement Copy for more types ([e60395d6](https://github.com/Marwes/combine/commit/e60395d683faf52be772d222f28a5d38aec05f5c))
<a name="v3.0.0-alpha.1"></a>
## v3.0.0-alpha.1 (2017-08-07)
#### Features
* Remove the old State type and Positioner trait ([ae43f8ae](https://github.com/Marwes/combine/commit/ae43f8ae2b303aca3b5ae9fbb1a87475349f2745), breaks [#](https://github.com/Marwes/combine/issues/))
* Teach the choice parser to take tuples ([96da7ee0](https://github.com/Marwes/combine/commit/96da7ee0cf8a112e60747a0be8a4dbd90efbecba), breaks [#](https://github.com/Marwes/combine/issues/))
* Add the range_of parser ([7e692086](https://github.com/Marwes/combine/commit/7e69208650f7fdc75279370b193030b09ccdbc7a), closes [#83](https://github.com/Marwes/combine/issues/83), breaks [#](https://github.com/Marwes/combine/issues/))
* Add map_token and map_range methods to ParseError ([2f92b296](https://github.com/Marwes/combine/commit/2f92b29669b618535bcd7533b7dd39b7daa8579b), closes [#86](https://github.com/Marwes/combine/issues/86))
* Allow ParseError to be used without the StreamOnce constraint ([520da8e8](https://github.com/Marwes/combine/commit/520da8e89f7162b4d6ba3a3bca05a05f3bd37999), breaks [#](https://github.com/Marwes/combine/issues/))
#### Bug Fixes
* Remove depreceated items ([9107342a](https://github.com/Marwes/combine/commit/9107342a89a5efc664bac9c2919a93a992ca6809), breaks [#](https://github.com/Marwes/combine/issues/))
* Don't forward tuple parsers to frunk to prevent a performance loss ([7e27c523](https://github.com/Marwes/combine/commit/7e27c523da46828b254ee4fc7c1f9750623e5aff))
* Add the correct errors after sequencing has returned EmptyOk ([54fecc62](https://github.com/Marwes/combine/commit/54fecc62938445aae15373a6b1ec7c4419582025), closes [#95](https://github.com/Marwes/combine/issues/95))
* Renamed SharedBufferedStream and BufferedStream to be less confusing ([3add407e](https://github.com/Marwes/combine/commit/3add407eecf886cc72ce05414d58a2b3b19a0bb9), breaks [#](https://github.com/Marwes/combine/issues/))
* Add From<u8> for Info ([4cf8cff6](https://github.com/Marwes/combine/commit/4cf8cff64466519bf2d4a4dc1dcbe8deb449e004))
* Make the positions of slice streams harder to misuse ([f50ab9e2](https://github.com/Marwes/combine/commit/f50ab9e2f42ec2465368bfb11a60b2339b699fc4), closes [#104](https://github.com/Marwes/combine/issues/104), breaks [#](https://github.com/Marwes/combine/issues/))
#### Breaking Changes
* Remove depreceated items ([9107342a](https://github.com/Marwes/combine/commit/9107342a89a5efc664bac9c2919a93a992ca6809), breaks [#](https://github.com/Marwes/combine/issues/))
* Renamed SharedBufferedStream and BufferedStream to be less confusing ([3add407e](https://github.com/Marwes/combine/commit/3add407eecf886cc72ce05414d58a2b3b19a0bb9), breaks [#](https://github.com/Marwes/combine/issues/))
* Remove the old State type and Positioner trait ([ae43f8ae](https://github.com/Marwes/combine/commit/ae43f8ae2b303aca3b5ae9fbb1a87475349f2745), breaks [#](https://github.com/Marwes/combine/issues/))
* Teach the choice parser to take tuples ([96da7ee0](https://github.com/Marwes/combine/commit/96da7ee0cf8a112e60747a0be8a4dbd90efbecba), breaks [#](https://github.com/Marwes/combine/issues/))
* Add the range_of parser ([7e692086](https://github.com/Marwes/combine/commit/7e69208650f7fdc75279370b193030b09ccdbc7a), closes [#83](https://github.com/Marwes/combine/issues/83), breaks [#](https://github.com/Marwes/combine/issues/))
* Make the positions of slice streams harder to misuse ([f50ab9e2](https://github.com/Marwes/combine/commit/f50ab9e2f42ec2465368bfb11a60b2339b699fc4), closes [#104](https://github.com/Marwes/combine/issues/104), breaks [#](https://github.com/Marwes/combine/issues/))
* Allow ParseError to be used without the StreamOnce constraint ([520da8e8](https://github.com/Marwes/combine/commit/520da8e89f7162b4d6ba3a3bca05a05f3bd37999), breaks [#](https://github.com/Marwes/combine/issues/))
<a name="v2.5.0"></a>
## v2.5.0 (2017-08-07)
#### Features
* Rename captures to captures_many and add a captures parser ([9d301e42](https://github.com/Marwes/combine/commit/9d301e42ee2da23c90ce78982d9dbef6d7586b4c))
* Add regex parsers (match_, find_many) ([5ac12b98](https://github.com/Marwes/combine/commit/5ac12b9883c49b345341ad47aeac2c8accd52c33))
* Add a macro to parse values directly into structs ([1656a620](https://github.com/Marwes/combine/commit/1656a620960e2b6256e724058cf39892d6e16944))
* add count_min_max and skip_count_min_max ([8f3413a7](https://github.com/Marwes/combine/commit/8f3413a7431f4459d67695156f0b259df422bf09))
* Add the skip_count parser ([15171d10](https://github.com/Marwes/combine/commit/15171d10495a5a221713ca0f67f3afc0b0eaf580))
* Add the recognize parser ([61c9b269](https://github.com/Marwes/combine/commit/61c9b269826707e7fa7409512f21122c9fd8f137))
* Add a macro for declaring parsers ([7fe1d9f7](https://github.com/Marwes/combine/commit/7fe1d9f723a14d20c9879849e104283ee24d254e), closes [#70](https://github.com/Marwes/combine/issues/70))
* Provide parsers for decoding big-endian and little-endian numbers ([05ec0bc8](https://github.com/Marwes/combine/commit/05ec0bc8675a2de0a71268a458ceefa7ee99f7a0))
#### Bug Fixes
* Report and_then errors as if at the start of the parse ([b71a78f1](https://github.com/Marwes/combine/commit/b71a78f12a40e90425d59f72d28c628d28aebe1d))
* Return EmptyErr when the any parser fails ([93208e9c](https://github.com/Marwes/combine/commit/93208e9c6fd92628eb02c0b32a0d6d3120a9af7f), closes [#99](https://github.com/Marwes/combine/issues/99))
* **doc:** regex find consumes input until the end of the first match ([d1bbf1d4](https://github.com/Marwes/combine/commit/d1bbf1d4198cb71d9c4b9e6d13399e38078518f0))
<a name="v2.3.0"></a>
## v2.3.0 (2017-02-22)
#### Performance
* Don't call parse_stream in optional ([a4bf28d2](a4bf28d2))
#### Features
* Add the choice! macro ([6f2cec69](6f2cec69))
* Add map functions for Error<> and Info<> ranges. (#86)
* Add Parser::boxed ([3af9c9b3](3af9c9b3))
<a name="2.1.0"></a>
## 2.1.0 (2016-10-30)
#### Features
* Add a read adapter for the stream trait ([a2a9f214](a2a9f214))
<a name="2.0.0"></a>
## 2.0.0 (2016-10-19)
#### Features
* Version 2.0.0 ([80b24186](https://github.com/Marwes/combine/commit/80b24186fb4854d3242f32abc727107545e08c7b))
* Add the count parser ([a7949f3a](https://github.com/Marwes/combine/commit/a7949f3aef8585523e730e2c1224c3725b360d32))
* Add the Parser::by_ref method ([15554d0c](https://github.com/Marwes/combine/commit/15554d0c64a2415e8c234708595cc544ada6c585))
* Add the one_of and none_of parsers ([941b277c](https://github.com/Marwes/combine/commit/941b277c8f4d8e8af804c88678181be7743f912b))
* Add the position parser ([d6c65f6d](https://github.com/Marwes/combine/commit/d6c65f6da5a2af47254abe2db4b04c3ecbd74803))
* Add bytes_cmp and string_cmp ([ee6b430d](https://github.com/Marwes/combine/commit/ee6b430d17508daf305d5f48fabae2d662a94d34))
* Add the `tokens` parser ([886c4523](https://github.com/Marwes/combine/commit/886c45235be207241874a0a412ebcc0733959466))
* Version 2.0.0-beta3 ([55c59322](https://github.com/Marwes/combine/commit/55c59322f8ead037dad703a41e1f6d769c059f31))
* Break out the error formatting into a separate function ([b6ccb0c1](https://github.com/Marwes/combine/commit/b6ccb0c1807f0f182878b68d4dbdcfa739fd5157))
* Rename parse_state to parse_stream ([b375df48](https://github.com/Marwes/combine/commit/b375df4811570d14bbd8db7cb74a6834e54679cf))
* Simplify the flat_map parser ([08a91ce2](https://github.com/Marwes/combine/commit/08a91ce201b67f5528a18228bdfb079e7d86dd7f))
* Merge the ParserExt trait into Parser ([26a84154](https://github.com/Marwes/combine/commit/26a841540107b79542bb874a60abb83f99c78a58))
* Add the bytes parser ([9c73c053](https://github.com/Marwes/combine/commit/9c73c053f37b149c35d60377f6dcbbbfc145dda9))
* Add parsers specialized on byte streams ([01ba3759](https://github.com/Marwes/combine/commit/01ba375929daac2cb81a3e966e529f0909014620))
* Make ctry usable outside the crate ([f45740dd](https://github.com/Marwes/combine/commit/f45740dd71cf9c71e0900e932c2f10ccbefae35e))
* Add versions of parse_* which return an unpacked version of ParseResult ([2bbd14ab](https://github.com/Marwes/combine/commit/2bbd14abd2b372afbfda56fb73d4aa036bd427e1))
* Add the satisy_map parser ([4d97d296](https://github.com/Marwes/combine/commit/4d97d2968c48026e8369e1f0bcee3c6ef5784664))
* Replace the And parser with the pair parser ([b1f56113](https://github.com/Marwes/combine/commit/b1f561139169caa1a5a2e3e2d84248b28f22bb82))
* Remove reexport of the char module from the root module ([e39dacb5](https://github.com/Marwes/combine/commit/e39dacb57999c3cfb0bb4ae6d5db0b696da60a3f))
* Version 2.0.0-beta ([5bdbf584](https://github.com/Marwes/combine/commit/5bdbf58484800717c7d7c20b9161562520f425cb))
* Remove the buffered_stream feature ([3fdbf217](https://github.com/Marwes/combine/commit/3fdbf217ec0a66b052b8d11792ce3ff3d13b7463))
* Version 1.3.0 ([acea26cd](https://github.com/Marwes/combine/commit/acea26cda536ffc681ca4fa9e4c1bf28f5184582))
* Add the eof parser ([6a89cbf2](https://github.com/Marwes/combine/commit/6a89cbf2ef11ed5bf4145a296c208e5f5f90438c))
* Stabilize RangeStream and all functions using it ([d932375d](https://github.com/Marwes/combine/commit/d932375d13a196fc74602f8e76ad5bd3512ca370))
* Reexport Stream and StreamOnce from the crate root ([2c2b3f5c](https://github.com/Marwes/combine/commit/2c2b3f5cd21a04fbc157a95ce76fe72bfdc1a2c3))
* Merge the HasPosition trait into StreamOnce ([3bda4a16](https://github.com/Marwes/combine/commit/3bda4a163e8f3b57dd4efa65384c97f9c3554aeb))
* Add the StreamOnce trait ([9ea0ed5d](https://github.com/Marwes/combine/commit/9ea0ed5d6c8f8cead773a24b968d4a0bbb606721), breaks [#](https://github.com/Marwes/combine/issues/))
* Make Stream::uncons take &mut self ([4ddc4257](https://github.com/Marwes/combine/commit/4ddc4257d1e719a9f1c17a49c39f08ebf20d2999))
* Separate the Position type and position method from Stream ([9cfb9a89](https://github.com/Marwes/combine/commit/9cfb9a895be34b288ee9fc9f926cd1b9c5b97b03))
* Version 1.2.1 ([f737af27](https://github.com/Marwes/combine/commit/f737af27306160088188900a1cdad255b5ca58d3))
* Move the position handling inside the Stream trait ([f41f65e9](https://github.com/Marwes/combine/commit/f41f65e9f34b64481f81af078ecdb10a80e75f6f))
* **range_stream:** Implement RangeStream on State ([f5679dc9](https://github.com/Marwes/combine/commit/f5679dc954be093a7a0278d2311cf5a162396833))
#### Performance
* Specialize and_then, then and flat_map ([9dc7dc6b](https://github.com/Marwes/combine/commit/9dc7dc6b9bcb638888be448efb7002d362aded16))
* Specialize the tuple parser to avoid unnecessary branches ([2b294f80](https://github.com/Marwes/combine/commit/2b294f8009021897d9652981dfb107dd2102a902))
* Add inline annotations and more forwarding parse functions ([0e5ee38e](https://github.com/Marwes/combine/commit/0e5ee38e1b15847908f6676c0c4032dc844e3462))
* Avoid indirection in Skip and With ([52d335ca](https://github.com/Marwes/combine/commit/52d335caa2e698de9be50e46e8fbcf241d4e3081))
* Optimize Iter by simplifying the state machine ([9631700a](https://github.com/Marwes/combine/commit/9631700a306cb5546e37dfb8f05d54728fb3bc8c))
* Speedup tuple parsers by simplifying the expanded code ([5d86dcf2](https://github.com/Marwes/combine/commit/5d86dcf2d14f1cae078d1a4b8831d37041eaf7a2))
* Avoid creating an error when take_while1 parses no input ([9bad15c0](https://github.com/Marwes/combine/commit/9bad15c0f79e3ff897fb92cdca6b92f988c69347))
* Possibly improve performance of the RangeStream impl for &str ([abb1de7f](https://github.com/Marwes/combine/commit/abb1de7f15b65b9bc2c40572319269191bd0819f))
#### Bug Fixes
* Rename the String parser to Str ([d846bf0e](https://github.com/Marwes/combine/commit/d846bf0e7ddb3350ce9245b3682d7c054ff5cdd8))
* Use five copies in the large http test anyway to match nom_benchmarks ([eb089f5b](https://github.com/Marwes/combine/commit/eb089f5bef175b96e097286b9c8c3e7d5f6e3922))
* Avoid storing the position in primitives::uncons_while ([9912507a](https://github.com/Marwes/combine/commit/9912507a80e178737e16d4ff3d19d7a1fee9fbc8))
* Calling uncons_range with the same size as is remaining should succeed ([cce6214e](https://github.com/Marwes/combine/commit/cce6214ed4722880881c8c6998e00f4509a22588))
* Add Sync to to the Error::Other variant ([22add3ec](https://github.com/Marwes/combine/commit/22add3eca62ff5e6f4d58122a4b366290b1d9385))
* Fix positions of BufferedStream being for the next token ([66eab92a](https://github.com/Marwes/combine/commit/66eab92a7dd63269f48cf0fbd0722a6eeea9135d))
* Fix the position handling of BufferedStream ([f21148b3](https://github.com/Marwes/combine/commit/f21148b3c4c5c6f10d8b6d90ce4a7925596879b3))
* Remove the Positioner bound from Stream::Token an Stream::Range ([fba3f1e7](https://github.com/Marwes/combine/commit/fba3f1e760505305b6a586b6ff5a53eff645e1d1))
* **buffered_stream:** BufferedStream no longer emits the last token after EOF ([6532884c](https://github.com/Marwes/combine/commit/6532884cc16307e1753584dd40b2b59e3daa6267))
* **travis:**
* Dont pass the test feature to travis ([382a608d](https://github.com/Marwes/combine/commit/382a608da2851c5cc2d3477025951e9a133732bc))
* Add travis_wait so travis does not time out the beta builds ([a3f0792a](https://github.com/Marwes/combine/commit/a3f0792ab347805e3f0ce619997a2c154f5e8c87))
#### Breaking Changes
* Add the StreamOnce trait ([9ea0ed5d](https://github.com/Marwes/combine/commit/9ea0ed5d6c8f8cead773a24b968d4a0bbb606721), breaks [#](https://github.com/Marwes/combine/issues/))

1709
vendor/combine/Cargo.lock generated vendored Normal file

File diff suppressed because it is too large Load Diff

253
vendor/combine/Cargo.toml vendored Normal file
View File

@@ -0,0 +1,253 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "combine"
version = "4.6.7"
authors = ["Markus Westerlind <marwes91@gmail.com>"]
description = "Fast parser combinators on arbitrary streams with zero-copy support."
documentation = "https://docs.rs/combine"
readme = "README.md"
keywords = [
"parser",
"parsing",
"combinators",
"ll",
]
categories = [
"parsing",
"no-std",
]
license = "MIT"
repository = "https://github.com/Marwes/combine"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = [
"--cfg",
"docsrs",
]
[profile.bench]
lto = true
codegen-units = 1
[lib]
name = "combine"
path = "src/lib.rs"
[[example]]
name = "async"
required-features = [
"std",
"tokio",
]
[[example]]
name = "date"
[[example]]
name = "number"
[[example]]
name = "readme"
[[example]]
name = "ini"
[[test]]
name = "async"
required-features = [
"tokio-02",
"futures-io-03",
]
[[bench]]
name = "json"
harness = false
required-features = ["std"]
[[bench]]
name = "http"
harness = false
required-features = ["std"]
[[bench]]
name = "mp4"
harness = false
required-features = ["mp4"]
[[bench]]
name = "buffers"
harness = false
required-features = ["std"]
[dependencies.bytes]
version = "1"
optional = true
[dependencies.bytes_05]
version = "0.5"
optional = true
package = "bytes"
[dependencies.futures-core-03]
version = "0.3.1"
optional = true
default-features = false
package = "futures-core"
[dependencies.futures-io-03]
version = "0.3.1"
optional = true
default-features = false
package = "futures-io"
[dependencies.memchr]
version = "2.3"
default-features = false
[dependencies.pin-project-lite]
version = "0.2"
optional = true
[dependencies.regex]
version = "1"
optional = true
[dependencies.tokio-02-dep]
version = "0.2.3"
features = ["io-util"]
optional = true
default-features = false
package = "tokio"
[dependencies.tokio-03-dep]
version = "0.3"
optional = true
default-features = false
package = "tokio"
[dependencies.tokio-dep]
version = "1"
optional = true
default-features = false
package = "tokio"
[dependencies.tokio-util]
version = "0.7"
features = ["codec"]
optional = true
default-features = false
[dev-dependencies.async-std]
version = "1"
[dev-dependencies.bytes]
version = "1"
[dev-dependencies.bytes_05]
version = "0.5"
package = "bytes"
[dev-dependencies.criterion]
version = "0.3"
default-features = false
[dev-dependencies.futures-03-dep]
version = "0.3.1"
package = "futures"
[dev-dependencies.once_cell]
version = "1.0"
[dev-dependencies.partial-io]
version = "0.3"
features = [
"tokio",
"quickcheck",
]
[dev-dependencies.quick-error]
version = "1.0"
[dev-dependencies.quickcheck]
version = "0.6"
[dev-dependencies.tokio-02-dep]
version = "0.2"
features = [
"fs",
"io-driver",
"io-util",
"macros",
]
package = "tokio"
[dev-dependencies.tokio-03-dep]
version = "0.3"
features = [
"fs",
"macros",
"rt-multi-thread",
]
package = "tokio"
[dev-dependencies.tokio-dep]
version = "1"
features = [
"fs",
"macros",
"rt",
"rt-multi-thread",
"io-util",
]
package = "tokio"
[features]
alloc = []
default = ["std"]
futures-03 = [
"pin-project",
"std",
"futures-core-03",
"futures-io-03",
"pin-project-lite",
]
mp4 = []
pin-project = ["pin-project-lite"]
std = [
"memchr/std",
"bytes",
"alloc",
]
tokio = [
"tokio-dep",
"tokio-util/io",
"futures-core-03",
"pin-project-lite",
]
tokio-02 = [
"pin-project",
"std",
"tokio-02-dep",
"futures-core-03",
"pin-project-lite",
"bytes_05",
]
tokio-03 = [
"pin-project",
"std",
"tokio-03-dep",
"futures-core-03",
"pin-project-lite",
]

22
vendor/combine/LICENSE vendored Normal file
View File

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Markus Westerlind
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

108
vendor/combine/README.md vendored Normal file
View File

@@ -0,0 +1,108 @@
# combine
[![Build Status](https://travis-ci.org/Marwes/combine.svg?branch=master)](https://travis-ci.org/Marwes/combine)
[![Docs](https://docs.rs/combine/badge.svg)](https://docs.rs/combine)
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Marwes/combine?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
An implementation of parser combinators for Rust, inspired by the Haskell library [Parsec](https://hackage.haskell.org/package/parsec). As in Parsec the parsers are [LL(1)](https://en.wikipedia.org/wiki/LL_parser) by default but they can opt-in to arbitrary lookahead using the [attempt combinator](https://docs.rs/combine/*/combine/fn.attempt.html).
## Example
```rust
extern crate combine;
use combine::{many1, Parser, sep_by};
use combine::parser::char::{letter, space};
// Construct a parser that parses *many* (and at least *1*) *letter*s
let word = many1(letter());
// Construct a parser that parses many *word*s where each word is *separated by* a (white)*space*
let mut parser = sep_by(word, space())
// Combine can collect into any type implementing `Default + Extend` so we need to assist rustc
// by telling it that `sep_by` should collect into a `Vec` and `many1` should collect to a `String`
.map(|mut words: Vec<String>| words.pop());
let result = parser.parse("Pick up that word!");
// `parse` returns `Result` where `Ok` contains a tuple of the parsers output and any remaining input.
assert_eq!(result, Ok((Some("word".to_string()), "!")));
```
Larger examples can be found in the [examples][], [tests][] and [benches][] folders.
[examples]:https://github.com/Marwes/combine/tree/master/examples
[tests]:https://github.com/Marwes/combine/tree/master/tests
[benches]:https://github.com/Marwes/combine/tree/master/benches
## Tutorial
A tutorial as well as explanations on what goes on inside combine can be found in [the wiki](https://github.com/Marwes/combine/wiki).
### Translation
[Japanese](https://github.com/sadnessOjisan/combine-ja)
## Links
[Documentation and examples](https://docs.rs/crate/combine)
[crates.io](https://crates.io/crates/combine)
## Features
* __Parse arbitrary streams__ - Combine can parse anything from `&[u8]` and `&str` to iterators and `Read` instances. If none of the builtin streams fit your use case you can even implement a couple traits your self to create your own custom [stream](https://docs.rs/combine/*/combine/stream/index.html)!
* __zero-copy parsing__ - When parsing in memory data, combine can parse without copying. See the [range module](https://docs.rs/combine/*/combine/parser/range/index.html) for parsers specialized for zero-copy parsing.
* __partial parsing__ - Combine parsers can be stopped at any point during parsing and later be resumed without losing any progress. This makes it possible to start parsing partial data coming from an io device such as a socket without worrying about if enough data is present to complete the parse. If more data is needed the parser will stop and may be resumed at the same point once more data is available. See the [async example](https://github.com/Marwes/combine/blob/master/examples/async.rs) for an example and [this post](https://marwes.github.io/2018/02/08/combine-3.html) for an introduction.
## About
A parser combinator is, broadly speaking, a function which takes several parsers as arguments and returns a new parser, created by combining those parsers. For instance, the [many](https://docs.rs/combine/*/combine/fn.many.html) parser takes one parser, `p`, as input and returns a new parser which applies `p` zero or more times. Thanks to the modularity that parser combinators gives it is possible to define parsers for a wide range of tasks without needing to implement the low level plumbing while still having the full power of Rust when you need it.
The library adheres to [semantic versioning](https://semver.org/).
If you end up trying it I welcome any feedback from your experience with it. I am usually reachable within a day by opening an issue, sending an email or posting a message on Gitter.
## FAQ
### Why does my errors contain inscrutable positions?
Since `combine` aims to create parsers with little to no overhead, streams over `&str` and `&[T]` do not carry any extra position information, but instead, they only rely on comparing the pointer of the buffer to check which `Stream` is further ahead than another `Stream`. To retrieve a better position, either call `translate_position` on the `PointerOffset` which represents the position or wrap your stream with `State`.
### How does it compare to nom?
https://github.com/Marwes/combine/issues/73 contains discussion and links to comparisons to [nom](https://github.com/Geal/nom).
## Parsers written in combine
### Formats and protocols
* GraphQL https://github.com/graphql-rust/graphql-parser (Uses a custom tokenizer as input)
* DiffX https://github.com/brennie/diffx-rs
* Redis https://github.com/mitsuhiko/redis-rs/pull/141 (Uses partial parsing)
* Toml https://github.com/ordian/toml_edit
* Maker Interchange Format https://github.com/aidanhs/frametool (Uses combine as a lexer)
* Javascript https://github.com/freemasen/ress
* JPEG Metadata https://github.com/vadixidav/exifsd
### Miscellaneous
* Template language https://github.com/tailhook/trimmer
* Code exercises https://github.com/dgel/adventOfCode2017
* Programming language
* https://github.com/MaikKlein/spire-lang
* https://github.com/vadixidav/typeflow/tree/master/lang
* Query parser (+ more) https://github.com/mozilla/mentat
* Query parser https://github.com/tantivy-search/tantivy
## Extra
There is an additional crate which has parsers to lex and parse programming languages in [combine-language](https://github.com/Marwes/combine-language).
## Contributing
The easiest way to contribute is to just open an issue about any problems you encounter using combine but if you are interested in adding something to the library here is a list of some of the easier things to work on to get started.
* __Add additional parsers__ If you have a suggestion for another parser just open an issue or a PR with an implementation.
* __Add additional examples__ More examples for using combine will always be useful!
* __Add and improve the docs__ Not the fanciest of work but one cannot overstate the importance of good documentation.

86
vendor/combine/benches/buffers.rs vendored Normal file
View File

@@ -0,0 +1,86 @@
#![cfg(feature = "std")]
use {
combine::{
parser::{
byte::take_until_bytes,
combinator::{any_send_sync_partial_state, recognize, AnySendSyncPartialState},
},
Parser, RangeStream,
},
criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion},
partial_io::{PartialOp, PartialRead},
std::io::Cursor,
};
/// Build the benchmark input: five million space bytes followed by a single
/// `b'1'` terminator (the byte `take_until_bytes` searches for).
fn test_data() -> Vec<u8> {
    let mut data = Vec::with_capacity(5_000_001);
    data.resize(5_000_000, b' ');
    data.push(b'1');
    data
}
/// Parser whose output is the number of bytes preceding the first `b"1"` in
/// the input.
///
/// `take_until_bytes(&b"1"[..])` consumes input up to the needle (presumably
/// without consuming the needle itself — TODO confirm against combine docs);
/// `recognize` collects the consumed bytes into a `Vec<u8>`, which `.map`
/// reduces to its length. `any_send_sync_partial_state` erases the concrete
/// `PartialState` type so `decode!` can suspend and resume the parse across
/// the partial reads driven by the benchmarks below.
fn parser<'a, I>() -> impl combine::Parser<I, Output = usize, PartialState = AnySendSyncPartialState>
where
    I: RangeStream<Token = u8, Range = &'a [u8]>,
    I::Error: combine::ParseError<u8, &'a [u8], I::Position>,
{
    any_send_sync_partial_state(
        recognize(take_until_bytes(&b"1"[..])).map(|spaces: Vec<u8>| spaces.len()),
    )
}
fn bench_small_buf(bencher: &mut Bencher<'_>) {
let input = test_data();
let mut decoder = combine::stream::decoder::Decoder::new();
bencher.iter(|| {
let cursor = Cursor::new(&input);
let mut partial_read =
PartialRead::new(cursor, std::iter::repeat(PartialOp::Limited(1000)));
let mut ref_decoder = &mut decoder;
let result = combine::decode!(ref_decoder, partial_read, parser(), |input, _position| {
combine::easy::Stream::from(input)
},);
match result {
Ok(usize) => black_box(usize),
Err(err) => {
println!("{:?}", err);
panic!();
}
};
});
}
fn bench_big_buf(bencher: &mut Bencher<'_>) {
let input = test_data();
let mut decoder = combine::stream::decoder::Decoder::new();
bencher.iter(|| {
let cursor = Cursor::new(&input);
let mut partial_read = PartialRead::new(cursor, std::iter::repeat(PartialOp::Unlimited));
let mut ref_decoder = &mut decoder;
let result = combine::decode!(ref_decoder, partial_read, parser(), |input, _position| {
combine::easy::Stream::from(input)
},);
match result {
Ok(usize) => black_box(usize),
Err(err) => {
println!("{:?}", err);
panic!();
}
};
});
}
fn bench(c: &mut Criterion) {
c.bench_function("buffers_small", bench_small_buf);
c.bench_function("buffers_big", bench_big_buf);
}
criterion_group!(buffers, bench);
criterion_main!(buffers);

317
vendor/combine/benches/data.json vendored Normal file
View File

@@ -0,0 +1,317 @@
[
{
"_id": "54d38af178bf1fbfe80a59e5",
"index": 0,
"guid": "cc631fea-7bf2-4595-950c-097880a818bc",
"isActive": false,
"balance": "$3,397.06",
"picture": "http://placehold.it/32x32",
"age": 39,
"eyeColor": "brown",
"name": "Corina Tyler",
"gender": "female",
"company": "GEEKOL",
"email": "corinatyler@geekol.com",
"phone": "+1 (899) 452-2754",
"address": "608 Leonard Street, Yettem, Connecticut, 6707",
"about": "Ea dolore pariatur aliqua veniam officia est et fugiat ipsum do sunt mollit id aute. Nisi ad elit ut et et. Duis in aliquip id labore ex et laboris anim magna proident Lorem est ut. Consectetur sint aliqua eu exercitation anim cupidatat fugiat. Enim exercitation amet ex irure quis anim est fugiat et laborum.\r\n",
"registered": "2014-03-27T05:29:11 -01:00",
"latitude": -57.157547,
"longitude": -23.548119,
"tags": [
"est",
"est",
"exercitation",
"ipsum",
"tempor",
"id",
"aliqua"
],
"friends": [
{
"id": 0,
"name": "Bates Lynch"
},
{
"id": 1,
"name": "Duffy Townsend"
},
{
"id": 2,
"name": "Nelson Good"
}
],
"greeting": "Hello, Corina Tyler! You have 8 unread messages.",
"favoriteFruit": "apple"
},
{
"_id": "54d38af1c9a8d04d5e35f211",
"index": 1,
"guid": "d8dd5231-048f-4a5a-b392-8f7c3492ad15",
"isActive": false,
"balance": "$1,646.89",
"picture": "http://placehold.it/32x32",
"age": 22,
"eyeColor": "brown",
"name": "Santos Boyd",
"gender": "male",
"company": "AUTOGRATE",
"email": "santosboyd@autograte.com",
"phone": "+1 (872) 545-2605",
"address": "428 Homecrest Avenue, Century, Kentucky, 1905",
"about": "Quis cillum mollit adipisicing duis sunt. Eiusmod culpa reprehenderit proident magna laborum voluptate incididunt et ipsum in laboris consectetur. Nostrud consequat excepteur nisi magna officia.\r\n",
"registered": "2015-02-03T03:03:32 -01:00",
"latitude": 47.505093,
"longitude": -138.153509,
"tags": [
"aliquip",
"et",
"eu",
"minim",
"non",
"eiusmod",
"deserunt"
],
"friends": [
{
"id": 0,
"name": "Annmarie Larson"
},
{
"id": 1,
"name": "Cherie Potts"
},
{
"id": 2,
"name": "Catalina Bass"
}
],
"greeting": "Hello, Santos Boyd! You have 1 unread messages.",
"favoriteFruit": "apple"
},
{
"_id": "54d38af171fedb95c2841d64",
"index": 2,
"guid": "f4b131d7-871f-4503-8674-275c4c8e3121",
"isActive": false,
"balance": "$3,311.55",
"picture": "http://placehold.it/32x32",
"age": 23,
"eyeColor": "green",
"name": "Jeannie Daugherty",
"gender": "female",
"company": "COMTRAK",
"email": "jeanniedaugherty@comtrak.com",
"phone": "+1 (853) 445-3806",
"address": "189 Narrows Avenue, Cascades, Arkansas, 5738",
"about": "Velit labore mollit in sint culpa dolor consequat voluptate cupidatat ut. Laborum dolore incididunt deserunt adipisicing aliquip nisi cupidatat. Aliqua cillum consequat voluptate nulla velit deserunt cillum do reprehenderit cupidatat quis labore anim nulla. Irure do nisi et ea excepteur culpa mollit aliqua occaecat id dolore ullamco ad. Magna enim consectetur ea ullamco tempor magna eu consequat sint irure. In Lorem est id est do anim ex.\r\n",
"registered": "2014-01-29T22:48:53 -01:00",
"latitude": 72.451611,
"longitude": -89.847471,
"tags": [
"consequat",
"deserunt",
"ipsum",
"commodo",
"magna",
"ut",
"ut"
],
"friends": [
{
"id": 0,
"name": "Kenya Alford"
},
{
"id": 1,
"name": "Francesca Carpenter"
},
{
"id": 2,
"name": "Celina Petty"
}
],
"greeting": "Hello, Jeannie Daugherty! You have 3 unread messages.",
"favoriteFruit": "strawberry"
},
{
"_id": "54d38af16b1001dc2ee7f7b0",
"index": 3,
"guid": "c905d47f-8e7a-488d-a1a0-df61bf1af2eb",
"isActive": true,
"balance": "$2,311.19",
"picture": "http://placehold.it/32x32",
"age": 39,
"eyeColor": "green",
"name": "Jordan Horton",
"gender": "female",
"company": "ENERSOL",
"email": "jordanhorton@enersol.com",
"phone": "+1 (910) 467-2211",
"address": "123 Harwood Place, Northchase, Guam, 5138",
"about": "Cupidatat aliqua nisi minim ad culpa cupidatat proident fugiat veniam aliquip minim anim et. Ullamco consequat consequat aute exercitation aliqua eiusmod sunt ea. Et laborum dolor ex proident do non adipisicing nostrud voluptate qui reprehenderit elit ad nostrud. Excepteur exercitation laborum nulla laboris incididunt. Est sunt nisi eu id fugiat excepteur. In sunt laborum aliquip aute ipsum.\r\n",
"registered": "2014-10-08T23:32:53 -02:00",
"latitude": -52.78747,
"longitude": -121.825343,
"tags": [
"voluptate",
"non",
"dolor",
"nisi",
"minim",
"mollit",
"minim"
],
"friends": [
{
"id": 0,
"name": "Fields Ballard"
},
{
"id": 1,
"name": "Peterson Reese"
},
{
"id": 2,
"name": "Montoya Mccullough"
}
],
"greeting": "Hello, Jordan Horton! You have 3 unread messages.",
"favoriteFruit": "banana"
},
{
"_id": "54d38af1ae98fd5f8860a1a0",
"index": 4,
"guid": "d6f73551-9672-4f4a-b832-1d2942a53203",
"isActive": false,
"balance": "$1,178.62",
"picture": "http://placehold.it/32x32",
"age": 36,
"eyeColor": "green",
"name": "Mae Roy",
"gender": "female",
"company": "BUGSALL",
"email": "maeroy@bugsall.com",
"phone": "+1 (859) 452-3883",
"address": "266 Berriman Street, Logan, Rhode Island, 2566",
"about": "Adipisicing id incididunt ut excepteur officia incididunt enim quis dolor incididunt esse esse. Nulla laboris pariatur eiusmod veniam duis ipsum Lorem ex cupidatat do sunt commodo cillum. Laboris enim aute irure qui officia laborum. Veniam ullamco ad laboris nulla minim laboris ut ad minim non duis.\r\n",
"registered": "2014-07-31T12:08:03 -02:00",
"latitude": 49.234132,
"longitude": -86.652077,
"tags": [
"ut",
"mollit",
"duis",
"ea",
"cillum",
"in",
"ut"
],
"friends": [
{
"id": 0,
"name": "Acevedo Kent"
},
{
"id": 1,
"name": "Wood Edwards"
},
{
"id": 2,
"name": "Kris Brennan"
}
],
"greeting": "Hello, Mae Roy! You have 3 unread messages.",
"favoriteFruit": "strawberry"
},
{
"_id": "54d38af1638743947b6d15c2",
"index": 5,
"guid": "db331420-0216-4a92-93eb-23aa0f69a533",
"isActive": true,
"balance": "$2,375.71",
"picture": "http://placehold.it/32x32",
"age": 20,
"eyeColor": "blue",
"name": "Mooney Manning",
"gender": "male",
"company": "HOMELUX",
"email": "mooneymanning@homelux.com",
"phone": "+1 (865) 585-2829",
"address": "833 Madison Street, Mulino, Iowa, 249",
"about": "Et ex irure magna culpa fugiat magna exercitation laborum velit quis nostrud et minim in. Sint nulla laboris non non. Do excepteur dolor ipsum proident anim.\r\n",
"registered": "2014-12-23T16:46:55 -01:00",
"latitude": -37.873914,
"longitude": 11.797406,
"tags": [
"cupidatat",
"labore",
"culpa",
"esse",
"amet",
"nostrud",
"irure"
],
"friends": [
{
"id": 0,
"name": "Avila Lawrence"
},
{
"id": 1,
"name": "Wright Stokes"
},
{
"id": 2,
"name": "Joann Ramirez"
}
],
"greeting": "Hello, Mooney Manning! You have 8 unread messages.",
"favoriteFruit": "banana"
},
{
"_id": "54d38af187f8159129b215ef",
"index": 6,
"guid": "0a7a0751-3ed3-480e-8b87-082f440d4bde",
"isActive": false,
"balance": "$2,258.43",
"picture": "http://placehold.it/32x32",
"age": 26,
"eyeColor": "brown",
"name": "Sylvia Roberts",
"gender": "female",
"company": "ZOLARITY",
"email": "sylviaroberts@zolarity.com",
"phone": "+1 (852) 580-3720",
"address": "604 Boardwalk , Greensburg, Pennsylvania, 9876",
"about": "Amet mollit mollit nostrud dolor ut et. Eiusmod amet id nulla commodo qui ullamco pariatur nulla magna nisi proident. Irure aliquip eu excepteur incididunt nisi consectetur.\r\n",
"registered": "2014-09-20T02:58:52 -02:00",
"latitude": 3.989497,
"longitude": -111.05834,
"tags": [
"culpa",
"quis",
"commodo",
"laboris",
"consectetur",
"pariatur",
"enim"
],
"friends": [
{
"id": 0,
"name": "Logan Rivas"
},
{
"id": 1,
"name": "Bryan Sullivan"
},
{
"id": 2,
"name": "Bobbie Cleveland"
}
],
"greeting": "Hello, Sylvia Roberts! You have 9 unread messages.",
"favoriteFruit": "banana"
}
]

494
vendor/combine/benches/http-requests.txt vendored Normal file
View File

@@ -0,0 +1,494 @@
GET / HTTP/1.1
Host: www.reddit.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
GET /reddit.v_EZwRzV-Ns.css HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/css,*/*;q=0.1
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /reddit-init.en-us.O1zuMqOOQvY.js HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /reddit.en-us.31yAfSoTsfo.js HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /kill.png HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /icon.png HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
GET /favicon.ico HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
GET /AMZM4CWd6zstSC8y.jpg HTTP/1.1
Host: b.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /jz1d5Nm0w97-YyNm.jpg HTTP/1.1
Host: b.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /aWGO99I6yOcNUKXB.jpg HTTP/1.1
Host: a.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /rZ_rD5TjrJM0E9Aj.css HTTP/1.1
Host: e.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/css,*/*;q=0.1
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /tmsPwagFzyTvrGRx.jpg HTTP/1.1
Host: a.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /KYgUaLvXCK3TCEJx.jpg HTTP/1.1
Host: a.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /81pzxT5x2ozuEaxX.jpg HTTP/1.1
Host: e.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /MFqCUiUVPO5V8t6x.jpg HTTP/1.1
Host: a.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /TFpYTiAO5aEowokv.jpg HTTP/1.1
Host: e.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /eMWMpmm9APNeNqcF.jpg HTTP/1.1
Host: e.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /S-IpsJrOKuaK9GZ8.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /3V6dj9PDsNnheDXn.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /wQ3-VmNXhv8sg4SJ.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /ixd1C1njpczEWC22.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /nGsQj15VyOHMwmq8.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /zT4yQmDxQLbIxK1b.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /L5e1HcZLv1iu4nrG.jpg HTTP/1.1
Host: f.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /WJFFPxD8X4JO_lIG.jpg HTTP/1.1
Host: f.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /hVMVTDdjuY3bQox5.jpg HTTP/1.1
Host: f.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /rnWf8CjBcyPQs5y_.jpg HTTP/1.1
Host: f.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /gZJL1jNylKbGV4d-.jpg HTTP/1.1
Host: d.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /aNd2zNRLXiMnKUFh.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /droparrowgray.gif HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
GET /sprite-reddit.an0Lnf61Ap4.png HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
GET /ga.js HTTP/1.1
Host: www.google-analytics.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
If-Modified-Since: Tue, 29 Oct 2013 19:33:51 GMT
GET /reddit/ads.html?sr=-reddit.com&bust2 HTTP/1.1
Host: static.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /pixel/of_destiny.png?v=hOlmDALJCWWdjzfBV4ZxJPmrdCLWB%2Ftq7Z%2Ffp4Q%2FxXbVPPREuMJMVGzKraTuhhNWxCCwi6yFEZg%3D&r=783333388 HTTP/1.1
Host: pixel.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /UNcO-h_QcS9PD-Gn.jpg HTTP/1.1
Host: c.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://e.thumbs.redditmedia.com/rZ_rD5TjrJM0E9Aj.css
GET /welcome-lines.png HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
GET /welcome-upvote.png HTTP/1.1
Host: www.redditstatic.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
GET /__utm.gif?utmwv=5.5.1&utms=1&utmn=720496082&utmhn=www.reddit.com&utme=8(site*srpath*usertype*uitype)9(%20reddit.com*%20reddit.com-GET_listing*guest*web)11(3!2)&utmcs=UTF-8&utmsr=2560x1600&utmvp=1288x792&utmsc=24-bit&utmul=en-us&utmje=1&utmfl=13.0%20r0&utmdt=reddit%3A%20the%20front%20page%20of%20the%20internet&utmhid=2129416330&utmr=-&utmp=%2F&utmht=1400862512705&utmac=UA-12131688-1&utmcc=__utma%3D55650728.585571751.1400862513.1400862513.1400862513.1%3B%2B__utmz%3D55650728.1400862513.1.1.utmcsr%3D(direct)%7Cutmccn%3D(direct)%7Cutmcmd%3D(none)%3B&utmu=qR~ HTTP/1.1
Host: www.google-analytics.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /ImnpOQhbXUPkwceN.png HTTP/1.1
Host: a.thumbs.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /ajax/libs/jquery/1.7.1/jquery.min.js HTTP/1.1
Host: ajax.googleapis.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /__utm.gif?utmwv=5.5.1&utms=2&utmn=1493472678&utmhn=www.reddit.com&utmt=event&utme=5(AdBlock*enabled*false)(0)8(site*srpath*usertype*uitype)9(%20reddit.com*%20reddit.com-GET_listing*guest*web)11(3!2)&utmcs=UTF-8&utmsr=2560x1600&utmvp=1288x792&utmsc=24-bit&utmul=en-us&utmje=1&utmfl=13.0%20r0&utmdt=reddit%3A%20the%20front%20page%20of%20the%20internet&utmhid=2129416330&utmr=-&utmp=%2F&utmht=1400862512708&utmac=UA-12131688-1&utmni=1&utmcc=__utma%3D55650728.585571751.1400862513.1400862513.1400862513.1%3B%2B__utmz%3D55650728.1400862513.1.1.utmcsr%3D(direct)%7Cutmccn%3D(direct)%7Cutmcmd%3D(none)%3B&utmu=6R~ HTTP/1.1
Host: www.google-analytics.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /ados.js?q=43 HTTP/1.1
Host: secure.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /fetch-trackers?callback=jQuery111005268222517967478_1400862512407&ids%5B%5D=t3_25jzeq-t8_k2ii&_=1400862512408 HTTP/1.1
Host: tracker.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /ados?t=1400862512892&request={%22Placements%22:[{%22A%22:5146,%22S%22:24950,%22D%22:%22main%22,%22AT%22:5},{%22A%22:5146,%22S%22:24950,%22D%22:%22sponsorship%22,%22AT%22:8}],%22Keywords%22:%22-reddit.com%22,%22Referrer%22:%22http%3A%2F%2Fwww.reddit.com%2F%22,%22IsAsync%22:true,%22WriteResults%22:true} HTTP/1.1
Host: engine.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /pixel/of_doom.png?id=t3_25jzeq-t8_k2ii&hash=da31d967485cdbd459ce1e9a5dde279fef7fc381&r=1738649500 HTTP/1.1
Host: pixel.redditmedia.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /Extensions/adFeedback.js HTTP/1.1
Host: static.adzrk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: */*
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /Extensions/adFeedback.css HTTP/1.1
Host: static.adzrk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/css,*/*;q=0.1
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /reddit/ads-load.html?bust2 HTTP/1.1
Host: static.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://www.reddit.com/
GET /Advertisers/a774d7d6148046efa89403a8db635a81.jpg HTTP/1.1
Host: static.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /i.gif?e=eyJhdiI6NjIzNTcsImF0Ijo1LCJjbSI6MTE2MzUxLCJjaCI6Nzk4NCwiY3IiOjMzNzAxNSwiZGkiOiI4NmI2Y2UzYWM5NDM0MjhkOTk2ZTg4MjYwZDE5ZTE1YyIsImRtIjoxLCJmYyI6NDE2MTI4LCJmbCI6MjEwNDY0LCJrdyI6Ii1yZWRkaXQuY29tIiwibWsiOiItcmVkZGl0LmNvbSIsIm53Ijo1MTQ2LCJwYyI6MCwicHIiOjIwMzYyLCJydCI6MSwicmYiOiJodHRwOi8vd3d3LnJlZGRpdC5jb20vIiwic3QiOjI0OTUwLCJ1ayI6InVlMS01ZWIwOGFlZWQ5YTc0MDFjOTE5NWNiOTMzZWI3Yzk2NiIsInRzIjoxNDAwODYyNTkzNjQ1fQ&s=lwlbFf2Uywt7zVBFRj_qXXu7msY HTTP/1.1
Host: engine.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
Cookie: azk=ue1-5eb08aeed9a7401c9195cb933eb7c966
GET /BurstingPipe/adServer.bs?cn=tf&c=19&mc=imp&pli=9994987&PluID=0&ord=1400862593644&rtu=-1 HTTP/1.1
Host: bs.serving-sys.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
GET /Advertisers/63cfd0044ffd49c0a71a6626f7a1d8f0.jpg HTTP/1.1
Host: static.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads-load.html?bust2
GET /BurstingPipe/adServer.bs?cn=tf&c=19&mc=imp&pli=9962555&PluID=0&ord=1400862593645&rtu=-1 HTTP/1.1
Host: bs.serving-sys.com
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads-load.html?bust2
Cookie: S_9994987=6754579095859875029; A4=01fmFvgRnI09SF00000; u2=d1263d39-874b-4a89-86cd-a2ab0860ed4e3Zl040
GET /i.gif?e=eyJhdiI6NjIzNTcsImF0Ijo4LCJjbSI6MTE2MzUxLCJjaCI6Nzk4NCwiY3IiOjMzNzAxOCwiZGkiOiI3OTdlZjU3OWQ5NjE0ODdiODYyMGMyMGJkOTE4YzNiMSIsImRtIjoxLCJmYyI6NDE2MTMxLCJmbCI6MjEwNDY0LCJrdyI6Ii1yZWRkaXQuY29tIiwibWsiOiItcmVkZGl0LmNvbSIsIm53Ijo1MTQ2LCJwYyI6MCwicHIiOjIwMzYyLCJydCI6MSwicmYiOiJodHRwOi8vd3d3LnJlZGRpdC5jb20vIiwic3QiOjI0OTUwLCJ1ayI6InVlMS01ZWIwOGFlZWQ5YTc0MDFjOTE5NWNiOTMzZWI3Yzk2NiIsInRzIjoxNDAwODYyNTkzNjQ2fQ&s=OjzxzXAgQksbdQOHNm-bjZcnZPA HTTP/1.1
Host: engine.adzerk.net
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
Accept: image/png,image/*;q=0.8,*/*;q=0.5
Accept-Language: en-us,en;q=0.5
Accept-Encoding: gzip, deflate
Connection: keep-alive
Referer: http://static.adzerk.net/reddit/ads-load.html?bust2
Cookie: azk=ue1-5eb08aeed9a7401c9195cb933eb7c966
GET /subscribe?host_int=1042356184&ns_map=571794054_374233948806,464381511_13349283399&user_id=245722467&nid=1399334269710011966&ts=1400862514 HTTP/1.1
Host: notify8.dropbox.com
Accept-Encoding: identity
Connection: keep-alive
X-Dropbox-Locale: en_US
User-Agent: DropboxDesktopClient/2.7.54 (Macintosh; 10.8; ('i32',); en_US)

181
vendor/combine/benches/http.rs vendored Normal file
View File

@@ -0,0 +1,181 @@
#![cfg(feature = "std")]
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate combine;
use std::fmt;
use {
combine::{
many, many1,
parser::range::{range, take_while1},
stream::easy,
token, ParseError, Parser, RangeStream,
},
criterion::{black_box, Bencher, Criterion},
};
/// An HTTP request line (method, URI, version), borrowed from the input buffer.
#[allow(dead_code)]
#[derive(Debug)]
struct Request<'a> {
    method: &'a [u8],  // e.g. b"GET"
    uri: &'a [u8],     // request target between the two spaces
    version: &'a [u8], // the digits/dots after "HTTP/", e.g. b"1.1"
}
/// A single HTTP header: its name and one slice per value line
/// (folded continuation lines each contribute one entry).
#[allow(dead_code)]
#[derive(Debug)]
struct Header<'a> {
    name: &'a [u8],
    value: Vec<&'a [u8]>,
}
/// Returns `true` if `c` may appear in an HTTP `token` (header names,
/// methods): printable ASCII excluding the separator characters.
fn is_token(c: u8) -> bool {
    match c {
        // Control characters and non-ASCII bytes are never token characters.
        0..=31 | 128..=255 => false,
        // The HTTP separator characters (including space).
        b'(' | b')' | b'<' | b'>' | b'@' | b',' | b';' | b':' | b'\\' | b'"' | b'/' | b'['
        | b']' | b'?' | b'=' | b'{' | b'}' | b' ' => false,
        _ => true,
    }
}
/// Returns `true` for horizontal whitespace: space or tab.
fn is_horizontal_space(c: u8) -> bool {
    matches!(c, b' ' | b'\t')
}
/// Returns `true` only for an ASCII space character.
fn is_space(c: u8) -> bool {
    matches!(c, b' ')
}
/// Returns `true` for every byte except an ASCII space.
fn is_not_space(c: u8) -> bool {
    !matches!(c, b' ')
}
/// Returns `true` for characters that may appear in the HTTP version
/// field after "HTTP/": decimal digits and '.'.
fn is_http_version(c: u8) -> bool {
    // `is_ascii_digit` is the idiomatic (and rangeless) form of `(b'0'..=b'9').contains(&c)`.
    c.is_ascii_digit() || c == b'.'
}
/// Parses a line terminator: "\r\n" or a bare "\n".
/// A CRLF pair is collapsed to a single b'\r'; a bare LF yields b'\n'.
fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>
where
    Input: RangeStream<Token = u8, Range = &'a [u8]>,
{
    (token(b'\r'), token(b'\n')).map(|_| b'\r').or(token(b'\n'))
}
/// Parses one HTTP header: `name ':' value-lines`.
fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>>
where
    Input: RangeStream<Token = u8, Range = &'a [u8]>,
{
    // One value line: leading horizontal whitespace, then everything up to
    // the line terminator. Folded continuation lines match this same shape,
    // so `many1` below collects one slice per line into `value`.
    let message_header_line = (
        take_while1(is_horizontal_space),
        take_while1(|c| c != b'\r' && c != b'\n'),
        end_of_line(),
    )
        .map(|(_, line, _)| line);
    // `struct_parser!` builds the struct field by field; `_:` entries are
    // parsed but discarded (here, the ':' separating name and value).
    struct_parser!(Header {
        name: take_while1(is_token),
        _: token(b':'),
        value: many1(message_header_line),
    })
}
/// A parsed request line together with all of its headers.
type HttpRequest<'a> = (Request<'a>, Vec<Header<'a>>);
/// Parses a single HTTP request: the request line, its headers, and the
/// blank line that ends the header section. Returns the parsed request
/// plus the remaining (unconsumed) input.
fn parse_http_request<'a, Input>(input: Input) -> Result<(HttpRequest<'a>, Input), Input::Error>
where
    Input: RangeStream<Token = u8, Range = &'a [u8]>,
{
    // The literal "HTTP/" prefix is matched and discarded; only the version
    // digits/dots after it are kept.
    let http_version = range(&b"HTTP/"[..]).with(take_while1(is_http_version));
    // METHOD SP URI SP HTTP/x.y — the space runs are parsed but discarded.
    let request_line = struct_parser!(Request {
        method: take_while1(is_token),
        _: take_while1(is_space),
        uri: take_while1(is_not_space),
        _: take_while1(is_space),
        version: http_version,
    });
    let mut request = (
        request_line,
        end_of_line(),
        many(message_header()),
        end_of_line(), // the empty line terminating the headers
    )
        .map(|(request, _, headers, _)| (request, headers));
    request.parse(input)
}
/// Captured corpus of HTTP requests used as the benchmark input.
static REQUESTS: &[u8] = include_bytes!("http-requests.txt");
/// Benchmarks parsing the corpus once through the `easy` stream wrapper
/// (the detailed-error path).
fn http_requests_small(b: &mut Bencher<'_>) {
    http_requests_bench(b, easy::Stream(REQUESTS))
}
/// Benchmarks the `easy` (detailed-error) path on five concatenated copies
/// of the request corpus.
fn http_requests_large(b: &mut Bencher<'_>) {
    // `slice::repeat` allocates the full buffer once and concatenates the
    // copies, replacing the manual `with_capacity` + `iter::repeat` loop.
    let buffer = REQUESTS.repeat(5);
    http_requests_bench(b, easy::Stream(&buffer[..]))
}
/// Benchmarks the plain-slice (cheap-error) path on five concatenated copies
/// of the request corpus.
fn http_requests_large_cheap_error(b: &mut Bencher<'_>) {
    // `slice::repeat` allocates the full buffer once and concatenates the
    // copies, replacing the manual `with_capacity` + `iter::repeat` loop.
    let buffer = REQUESTS.repeat(5);
    http_requests_bench(b, &buffer[..])
}
/// Repeatedly parses requests from `buffer` until the input is exhausted,
/// panicking (and thus failing the benchmark) on any parse error.
fn http_requests_bench<'a, Input>(b: &mut Bencher<'_>, buffer: Input)
where
    Input: RangeStream<Token = u8, Range = &'a [u8]> + Clone,
    Input::Error: fmt::Debug,
{
    b.iter(|| {
        // Clone per iteration so every run parses the whole input from the start.
        let mut buf = black_box(buffer.clone());
        // Probing `uncons` on a throwaway clone checks "is there more input?"
        // without advancing `buf` itself.
        while buf.clone().uncons().is_ok() {
            match parse_http_request(buf) {
                Ok(((_, _), b)) => {
                    buf = b; // continue from the unparsed remainder
                }
                Err(err) => panic!("{:?}", err),
            }
        }
    });
}
/// Registers the three HTTP parsing benchmarks with criterion.
fn http_requests(c: &mut Criterion) {
    c.bench_function("http_requests_small", http_requests_small);
    c.bench_function("http_requests_large", http_requests_large);
    c.bench_function(
        "http_requests_large_cheap_error",
        http_requests_large_cheap_error,
    );
}
// Generate the benchmark group and the binary's `main` entry point.
criterion_group!(http, http_requests,);
criterion_main!(http);

316
vendor/combine/benches/json.rs vendored Normal file
View File

@@ -0,0 +1,316 @@
#![cfg(feature = "std")]
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate combine;
use std::{collections::HashMap, fs::File, io::Read, path::Path};
use {
combine::{
error::{Commit, ParseError},
parser::{
char::{char, digit, spaces, string},
choice::{choice, optional},
function::parser,
repeat::{many, many1, sep_by},
sequence::between,
token::{any, satisfy, satisfy_map},
},
stream::{
buffered,
position::{self, SourcePosition},
IteratorStream,
},
EasyParser, Parser, Stream, StreamOnce,
},
criterion::{black_box, Bencher, Criterion},
};
/// A JSON value as produced by the benchmark's parser.
#[derive(PartialEq, Debug)]
enum Value {
    Number(f64),
    String(String),
    Bool(bool),
    Null,
    Object(HashMap<String, Value>),
    Array(Vec<Value>),
}
/// Wraps `p` so that any whitespace following it is consumed and discarded
/// ("lexeme"-style whitespace handling).
fn lex<Input, P>(p: P) -> impl Parser<Input, Output = P::Output>
where
    P: Parser<Input>,
    Input: Stream<Token = char>,
    <Input as StreamOnce>::Error: ParseError<
        <Input as StreamOnce>::Token,
        <Input as StreamOnce>::Range,
        <Input as StreamOnce>::Position,
    >,
{
    p.skip(spaces())
}
/// Parses one or more decimal digits (plus trailing whitespace) into an `i64`.
fn integer<Input>() -> impl Parser<Input, Output = i64>
where
    Input: Stream<Token = char>,
{
    lex(many1(digit()))
        .map(|digits: String| {
            // Accumulate most-significant digit first: n = n * 10 + digit.
            digits
                .chars()
                .fold(0, |n, c| n * 10 + (c as i64 - '0' as i64))
        })
        .expected("integer")
}
/// Parses a JSON number — optional sign, integer part, optional fraction and
/// optional exponent — into an `f64`, consuming trailing whitespace.
fn number<Input>() -> impl Parser<Input, Output = f64>
where
    Input: Stream<Token = char>,
{
    // Integer part: a lone '0', or a multi-digit integer.
    let i = char('0').map(|_| 0.0).or(integer().map(|x| x as f64));
    // Fraction digits folded most-significant first; each digit is scaled by
    // a successively smaller magnitude (1/10, 1/100, ...).
    let fractional = many(digit()).map(|digits: String| {
        let mut magnitude = 1.0;
        digits.chars().fold(0.0, |acc, d| {
            magnitude /= 10.0;
            match d.to_digit(10) {
                Some(d) => acc + (d as f64) * magnitude,
                None => panic!("Not a digit"),
            }
        })
    });
    // Exponent marker 'e'/'E', optional '-' sign, then digits.
    let exp = satisfy(|c| c == 'e' || c == 'E').with(optional(char('-')).and(integer()));
    lex(optional(char('-'))
        .and(i)
        .map(|(sign, n)| if sign.is_some() { -n } else { n })
        .and(optional(char('.')).with(fractional))
        // Add the fraction away from zero so negative numbers stay negative.
        .map(|(x, y)| if x >= 0.0 { x + y } else { x - y })
        .and(optional(exp))
        .map(|(n, exp_option)| match exp_option {
            Some((sign, e)) => {
                let e = if sign.is_some() { -e } else { e };
                n * 10.0f64.powi(e as i32)
            }
            None => n,
        }))
    .expected("number")
}
/// Parses one character of a JSON string body.
///
/// Decodes the single-character backslash escapes; an unescaped '"' fails
/// without consuming input so that `many(json_char())` stops cleanly at the
/// closing quote.
fn json_char<Input>() -> impl Parser<Input, Output = char>
where
    Input: Stream<Token = char>,
{
    parser(|input: &mut Input| {
        let (c, committed) = any().parse_lazy(input).into_result()?;
        // Maps the character after a backslash to the character it encodes;
        // any other character yields `None`, i.e. a parse error.
        let mut back_slash_char = satisfy_map(|c| {
            Some(match c {
                '"' => '"',
                '\\' => '\\',
                '/' => '/',
                'b' => '\u{0008}',
                'f' => '\u{000c}',
                'n' => '\n',
                'r' => '\r',
                't' => '\t',
                _ => return None,
            })
        });
        match c {
            // After a backslash, commit and parse exactly one escape char.
            '\\' => committed.combine(|_| back_slash_char.parse_stream(input).into_result()),
            // Unescaped quote: report an empty, uncommitted (Peek) error at
            // the current position so the caller stops here.
            '"' => Err(Commit::Peek(Input::Error::empty(input.position()).into())),
            _ => Ok((c, committed)),
        }
    })
}
/// Parses a double-quoted JSON string, consuming whitespace after the
/// closing quote.
fn json_string<Input>() -> impl Parser<Input, Output = String>
where
    Input: Stream<Token = char>,
{
    between(char('"'), lex(char('"')), many(json_char())).expected("string")
}
/// Parses a JSON object: '{', comma-separated `"key": value` fields, '}',
/// collected into a `HashMap`.
fn object<Input>() -> impl Parser<Input, Output = Value>
where
    Input: Stream<Token = char>,
{
    // A field is `string ':' value`; the ':' token is dropped from the pair.
    let field = (json_string(), lex(char(':')), json_value()).map(|t| (t.0, t.2));
    let fields = sep_by(field, lex(char(',')));
    between(lex(char('{')), lex(char('}')), fields)
        .map(Value::Object)
        .expected("object")
}
/// Parses any JSON value. Thin wrapper over the `parser!`-generated
/// `json_value_`, which exists to break the recursive parser type.
#[inline]
fn json_value<Input>() -> impl Parser<Input, Output = Value>
where
    Input: Stream<Token = char>,
{
    json_value_()
}
// We need to use `parser!` to break the recursive use of `value` to prevent the returned parser
// from containing itself
parser! {
    #[inline]
    fn json_value_[Input]()(Input) -> Value
    where [ Input: Stream<Token = char> ]
    {
        // '[' comma-separated values ']'
        let array = between(
            lex(char('[')),
            lex(char(']')),
            sep_by(json_value(), lex(char(','))),
        ).map(Value::Array);
        // Try each JSON form in order; the keyword literals come last.
        choice((
            json_string().map(Value::String),
            object(),
            array,
            number().map(Value::Number),
            lex(string("false").map(|_| Value::Bool(false))),
            lex(string("true").map(|_| Value::Bool(true))),
            lex(string("null").map(|_| Value::Null)),
        ))
    }
}
/// Sanity-checks the parser against a small document exercising every value
/// kind: strings, objects, several number forms, booleans, null and arrays.
#[test]
fn json_test() {
    use self::Value::*;
    let input = r#"{
    "array": [1, ""],
    "object": {},
    "number": 3.14,
    "small_number": 0.59,
    "int": -100,
    "exp": -1e2,
    "exp_neg": 23e-2,
    "true": true,
    "false" : false,
    "null" : null
}"#;
    let result = json_value().easy_parse(input);
    // Build the expected map from `(key, value)` pairs.
    let expected = Object(
        vec![
            ("array", Array(vec![Number(1.0), String("".to_string())])),
            ("object", Object(HashMap::new())),
            ("number", Number(3.14)),
            ("small_number", Number(0.59)),
            ("int", Number(-100.)),
            ("exp", Number(-1e2)),
            ("exp_neg", Number(23E-2)),
            ("true", Bool(true)),
            ("false", Bool(false)),
            ("null", Null),
        ]
        .into_iter()
        .map(|(k, v)| (k.to_string(), v))
        .collect(),
    );
    match result {
        // The second tuple element is the leftover input — must be empty.
        Ok(result) => assert_eq!(result, (expected, "")),
        Err(e) => {
            println!("{}", e);
            panic!();
        }
    }
}
/// Reads the benchmark fixture `benches/data.json` into a `String`.
///
/// Panics with a descriptive message if the file is missing or unreadable,
/// failing the benchmark loudly.
fn test_data() -> String {
    // `fs::read_to_string` sizes the buffer from the file metadata and
    // replaces the redundant `File::open(&Path::new(&...))` + manual read.
    std::fs::read_to_string("benches/data.json").expect("failed to read benches/data.json")
}
/// Benchmarks parsing `data.json` via `easy_parse` over a position-tracking
/// stream — the most expensive error/position configuration.
fn bench_json(bencher: &mut Bencher<'_>) {
    let data = test_data();
    let mut parser = json_value();
    // Parse once up front to verify the fixture parses (to a top-level array)
    // before any timing happens.
    match parser.easy_parse(position::Stream::new(&data[..])) {
        Ok((Value::Array(_), _)) => (),
        Ok(_) => panic!(),
        Err(err) => {
            println!("{}", err);
            panic!();
        }
    }
    bencher.iter(|| {
        let result = parser.easy_parse(position::Stream::new(&data[..]));
        black_box(result)
    });
}
/// Benchmarks parsing with plain `parse` (cheap core errors) while still
/// tracking source positions.
fn bench_json_core_error(bencher: &mut Bencher<'_>) {
    let data = test_data();
    let mut parser = json_value();
    // Verify the fixture parses to a top-level array before timing.
    match parser.parse(position::Stream::new(&data[..])) {
        Ok((Value::Array(_), _)) => (),
        Ok(_) => panic!(),
        Err(err) => {
            println!("{}", err);
            panic!();
        }
    }
    bencher.iter(|| {
        let result = parser.parse(position::Stream::new(&data[..]));
        black_box(result)
    });
}
/// Benchmarks the cheapest configuration: plain `parse` directly on a `&str`,
/// with neither detailed errors nor position tracking.
fn bench_json_core_error_no_position(bencher: &mut Bencher<'_>) {
    let data = test_data();
    let mut parser = json_value();
    // Verify the fixture parses to a top-level array before timing.
    match parser.parse(&data[..]) {
        Ok((Value::Array(_), _)) => (),
        Ok(_) => panic!(),
        Err(err) => {
            println!("{}", err);
            panic!();
        }
    }
    bencher.iter(|| {
        let result = parser.parse(&data[..]);
        black_box(result)
    });
}
/// Benchmarks parsing through a `buffered::Stream` (lookahead 1) fed by a
/// char iterator — i.e. a non-random-access input with explicit buffering.
fn bench_buffered_json(bencher: &mut Bencher<'_>) {
    let data = test_data();
    bencher.iter(|| {
        // The iterator-backed stream is consumed by parsing and cannot be
        // rewound, so the whole pipeline is rebuilt every iteration.
        let buffer =
            buffered::Stream::new(position::Stream::new(IteratorStream::new(data.chars())), 1);
        let mut parser = json_value();
        match parser.easy_parse(position::Stream::with_positioner(
            buffer,
            SourcePosition::default(),
        )) {
            Ok((Value::Array(v), _)) => {
                black_box(v);
            }
            Ok(_) => panic!(),
            Err(err) => {
                println!("{}", err);
                panic!();
            }
        }
    });
}
/// Registers the four JSON parsing benchmarks with criterion.
fn bench(c: &mut Criterion) {
    c.bench_function("json", bench_json);
    c.bench_function("json_core_error", bench_json_core_error);
    c.bench_function(
        "json_core_error_no_position",
        bench_json_core_error_no_position,
    );
    c.bench_function("buffered_json", bench_buffered_json);
}
// Generate the benchmark group and the binary's `main` entry point.
criterion_group!(json, bench);
criterion_main!(json);

86
vendor/combine/benches/mp4.rs vendored Normal file
View File

@@ -0,0 +1,86 @@
#![cfg(feature = "mp4")]
#[macro_use]
extern crate criterion;
use std::{fs::File, io::Read, str::from_utf8};
use {
combine::{
parser::{
byte::num::be_u32,
range::{range, take},
},
stream::easy::ParseError,
*,
},
criterion::{black_box, Bencher, Criterion},
};
/// Contents of an MP4 `ftyp` (file type) box, borrowed from the input.
#[derive(Clone, PartialEq, Eq, Debug)]
struct FileType<'a> {
    major_brand: &'a str,          // 4-byte brand tag, validated as UTF-8
    major_brand_version: &'a [u8], // raw 4-byte version field
    compatible_brands: Vec<&'a str>,
}
/// The MP4 box types the benchmark distinguishes; only `Ftyp` carries data.
#[derive(Clone, Debug)]
enum MP4Box<'a> {
    Ftyp(FileType<'a>),
    Moov,
    Mdat,
    Free,
    Skip,
    Wide,
    /// Any box type not matched above.
    Unknown,
}
/// Splits `data` into length-prefixed MP4 boxes and classifies each one.
///
/// Each box starts with a big-endian u32 length that counts the length field
/// itself (hence the `- 4` below), followed by a 4-byte box type. Only `ftyp`
/// boxes are decoded further; other recognized types map to unit variants and
/// anything else to `MP4Box::Unknown`.
fn parse_mp4(data: &[u8]) -> Result<(Vec<MP4Box>, &[u8]), ParseError<&[u8]>> {
    // A brand is a 4-byte tag that must be valid UTF-8.
    let brand_name = || take(4).and_then(from_utf8);
    // "ftyp" box body: major brand, 4-byte version, then compatible brands.
    let filetype_box = (
        range(&b"ftyp"[..]),
        brand_name(),
        take(4),
        many(brand_name()),
    )
        .map(|(_, m, v, c)| {
            MP4Box::Ftyp(FileType {
                major_brand: m,
                major_brand_version: v,
                compatible_brands: c,
            })
        });
    // Read the length header, then take the rest of the box (length minus
    // the 4 header bytes already consumed).
    let mp4_box = be_u32().then(|offset| take(offset as usize - 4));
    // `value(...)` always succeeds, so unmatched box types become Unknown.
    let mut box_parser = choice((
        filetype_box,
        range(&b"moov"[..]).map(|_| MP4Box::Moov),
        range(&b"mdat"[..]).map(|_| MP4Box::Mdat),
        range(&b"free"[..]).map(|_| MP4Box::Free),
        range(&b"skip"[..]).map(|_| MP4Box::Skip),
        range(&b"wide"[..]).map(|_| MP4Box::Wide),
        value(MP4Box::Unknown),
    ));
    // Run the classifier over each box's payload, keeping only the value.
    let data_interpreter =
        mp4_box.flat_map(|box_data| box_parser.easy_parse(box_data).map(|t| t.0));
    many(data_interpreter).easy_parse(data)
}
/// Benchmarks `parse_mp4` over `data`, panicking with a readable error on failure.
fn run_test(b: &mut Bencher, data: &[u8]) {
    b.iter(|| {
        let parsed = parse_mp4(data);
        match parsed {
            Ok(boxes) => black_box(boxes),
            Err(err) => panic!("{}", err.map_range(|bytes| format!("{:?}", bytes))),
        }
    });
}
/// Loads `benches/small.mp4` from disk and registers the parse benchmark.
fn mp4_small_test(c: &mut Criterion) {
    let mut bytes = Vec::new();
    let read_result =
        File::open("benches/small.mp4").and_then(|mut f| f.read_to_end(&mut bytes));
    read_result.expect("Unable to read benches/small.mp4");
    c.bench_function("mp4_small", move |b| run_test(b, &bytes));
}
criterion_group!(mp4, mp4_small_test);
criterion_main!(mp4);

18
vendor/combine/ci.sh vendored Executable file
View File

@@ -0,0 +1,18 @@
#!/bin/bash -x
# CI driver: builds and tests the crate across its feature matrix.
# Extra arguments (e.g. a toolchain selector) are forwarded to every cargo call.
set -ex
# Full feature set: library, unit tests and examples
cargo "$@" build
cargo "$@" test --all-features
cargo "$@" test --all-features --examples
# Benchmarks: run the criterion harnesses in test mode; only type-check mp4
cargo "$@" test --bench json --bench http -- --test
cargo "$@" check --bench mp4 --features mp4
# alloc-only configuration (no std)
cargo "$@" build --no-default-features --features alloc
cargo "$@" test --no-default-features --features alloc --examples
# Minimal configuration
cargo "$@" build --no-default-features
cargo "$@" test --no-default-features --examples
# Legacy tokio runtime integrations must keep compiling
cargo "$@" check --no-default-features --features tokio-02
cargo "$@" check --no-default-features --features tokio-03

187
vendor/combine/examples/async.rs vendored Normal file
View File

@@ -0,0 +1,187 @@
#![cfg(feature = "std")]
#![cfg(feature = "tokio")]
use std::{cell::Cell, io::Cursor, rc::Rc, str};
use {futures_03_dep as futures, tokio_dep as tokio};
use {
bytes::{Buf, BytesMut},
combine::{
error::{ParseError, StreamError},
parser::{
byte::digit,
combinator::{any_partial_state, AnyPartialState},
range::{range, recognize, take},
},
skip_many, skip_many1,
stream::{easy, PartialStream, RangeStream, StreamErrorFor},
Parser,
},
futures::prelude::*,
partial_io::PartialOp,
tokio_util::codec::{Decoder, FramedRead},
};
// Workaround partial_io not working with tokio-0.2
#[path = "../tests/support/mod.rs"]
mod support;
use support::*;
/// `tokio_util` codec that frames `Content-Length`-prefixed messages.
pub struct LanguageServerDecoder {
    // Resumable parser state kept between partial `decode` calls
    state: AnyPartialState,
    // Counts header parses (for the demo assertion in `main`)
    content_length_parses: Rc<Cell<i32>>,
}
impl Default for LanguageServerDecoder {
    fn default() -> Self {
        LanguageServerDecoder {
            state: Default::default(),
            content_length_parses: Rc::new(Cell::new(0)),
        }
    }
}
/// Parses blocks of data with length headers
///
/// ```
/// Content-Length: 18
///
/// { "some": "data" }
/// ```
///
/// Returns the message body as owned bytes; the partial state is boxed so the
/// caller can store it without naming the parser's concrete type.
// The `content_length_parses` parameter only exists to demonstrate that `content_length` only
// gets parsed once per message
fn decode_parser<'a, Input>(
    content_length_parses: Rc<Cell<i32>>,
) -> impl Parser<Input, Output = Vec<u8>, PartialState = AnyPartialState> + 'a
where
    Input: RangeStream<Token = u8, Range = &'a [u8]> + 'a,
{
    // "Content-Length: <digits>" -> usize
    let content_length = range(&b"Content-Length: "[..])
        .with(recognize(skip_many1(digit())).and_then(|digits: &[u8]| {
            str::from_utf8(digits)
                .unwrap()
                .parse::<usize>()
                // Convert the error from `.parse` into an error combine understands
                .map_err(StreamErrorFor::<Input>::other)
        }))
        .map(move |x| {
            content_length_parses.set(content_length_parses.get() + 1);
            x
        });
    // `any_partial_state` boxes the state which hides the type and lets us store it in
    // `self`
    any_partial_state(
        (
            skip_many(range(&b"\r\n"[..])),
            content_length,
            range(&b"\r\n\r\n"[..]).map(|_| ()),
        )
            .then_partial(|&mut (_, message_length, _)| {
                take(message_length).map(|bytes: &[u8]| bytes.to_owned())
            }),
    )
}
impl Decoder for LanguageServerDecoder {
    type Item = String;
    type Error = Box<dyn std::error::Error + Send + Sync>;
    /// Tries to decode one length-prefixed message from `src`, returning
    /// `Ok(None)` when more input is required to finish the current message.
    fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        println!("Decoding `{:?}`", str::from_utf8(src).unwrap_or("NOT UTF8"));
        let (opt, removed_len) = combine::stream::decode(
            decode_parser(self.content_length_parses.clone()),
            // easy::Stream gives us nice error messages
            // (the same error messages that combine has had since its inception)
            // PartialStream lets the parser know that more input should be
            // expected if end of input is unexpectedly reached
            &mut easy::Stream(PartialStream(&src[..])),
            &mut self.state,
        )
        .map_err(|err| {
            // Since err contains references into `src` we must replace these before
            // we can return an error or call `advance` to remove the input we
            // just committed
            let err = err
                .map_range(|r| {
                    str::from_utf8(r)
                        .ok()
                        .map_or_else(|| format!("{:?}", r), |s| s.to_string())
                })
                .map_position(|p| p.translate_position(&src[..]));
            format!("{}\nIn input: `{}`", err, str::from_utf8(src).unwrap())
        })?;
        println!(
            "Accepted {} bytes: `{:?}`",
            removed_len,
            str::from_utf8(&src[..removed_len]).unwrap_or("NOT UTF8")
        );
        // Remove the input we just committed.
        // Ideally this would be done automatically by the call to
        // `stream::decode` but it does unfortunately not work due
        // to lifetime issues (Non lexical lifetimes might fix it!)
        src.advance(removed_len);
        match opt {
            // `None` means we did not have enough input and we require that the
            // caller of `decode` supply more before calling us again
            None => {
                println!("Requesting more input!");
                Ok(None)
            }
            // `Some` means that a message was successfully decoded
            // (and that we are ready to start decoding the next message)
            Some(output) => {
                let value = String::from_utf8(output)?;
                println!("Decoded `{}`", value);
                Ok(Some(value))
            }
        }
    }
}
#[tokio::main]
async fn main() {
    // Two framed messages back to back
    let input = "Content-Length: 6\r\n\
                 \r\n\
                 123456\r\n\
                 Content-Length: 4\r\n\
                 \r\n\
                 true";
    // Chunk sizes for the emulated partial reads below
    let seq = vec![
        PartialOp::Limited(20),
        PartialOp::Limited(1),
        PartialOp::Limited(2),
        PartialOp::Limited(3),
    ];
    let reader = &mut Cursor::new(input.as_bytes());
    // Using the `partial_io` crate we emulate the partial reads that would happen when reading
    // asynchronously from an io device.
    let partial_reader = PartialAsyncRead::new(reader, seq);
    let decoder = LanguageServerDecoder::default();
    let content_length_parses = decoder.content_length_parses.clone();
    let result = FramedRead::new(partial_reader, decoder).try_collect().await;
    assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
    let values: Vec<_> = result.unwrap();
    let expected_values = ["123456", "true"];
    assert_eq!(values, expected_values);
    // Each header must have been parsed exactly once despite the partial reads
    assert_eq!(content_length_parses.get(), expected_values.len() as i32);
    println!("Successfully parsed: `{}`", input);
    println!(
        "Found {} items and never repeated a completed parse!",
        values.len(),
    );
    println!("Result: {:?}", values);
}

228
vendor/combine/examples/date.rs vendored Normal file
View File

@@ -0,0 +1,228 @@
//! Parser example for ISO8601 dates. This does not handle the entire specification but it should
//! show the gist of it and be easy to extend to parse additional forms.
use std::{
env, fmt,
fs::File,
io::{self, Read},
};
use combine::{
choice,
many, optional,
parser::char::{char, digit},
stream::position,
Parser, Stream,
};
#[cfg(feature = "std")]
use combine::{
stream::{easy, position::SourcePosition},
EasyParser,
};
/// Either an I/O failure while reading the input or a parse failure.
enum Error<E> {
    Io(io::Error),
    Parse(E),
}
impl<E> fmt::Display for Error<E>
where
    E: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Error::Io(ref err) => write!(f, "{}", err),
            Error::Parse(ref err) => write!(f, "{}", err),
        }
    }
}
/// A calendar date (year, month, day).
#[derive(PartialEq, Debug)]
pub struct Date {
    pub year: i32,
    pub month: i32,
    pub day: i32,
}
/// A time of day plus its UTC offset.
#[derive(PartialEq, Debug)]
pub struct Time {
    pub hour: i32,
    pub minute: i32,
    pub second: i32,
    // Offset from UTC in minutes; negative means west of UTC
    pub time_zone: i32,
}
/// A combined ISO8601 date and time.
#[derive(PartialEq, Debug)]
pub struct DateTime {
    pub date: Date,
    pub time: Time,
}
/// Parses exactly two decimal digits into an `i32`, e.g. "07" -> 7.
fn two_digits<Input>() -> impl Parser<Input, Output = i32>
where
    Input: Stream<Token = char>,
{
    (digit(), digit()).map(|(tens, ones): (char, char)| {
        let tens = tens.to_digit(10).expect("digit") as i32;
        let ones = ones.to_digit(10).expect("digit") as i32;
        tens * 10 + ones
    })
}
/// Parses a time zone as minutes east of UTC
/// +0012
/// -06:30
/// -01
/// Z
fn time_zone<Input>() -> impl Parser<Input, Output = i32>
where
    Input: Stream<Token = char>,
{
    let zulu = char('Z').map(|_| 0);
    let numeric_offset = (
        choice([char('-'), char('+')]),
        two_digits(),
        // Minutes are optional and may be separated from hours by ':'
        optional(optional(char(':')).with(two_digits())),
    )
        .map(|(sign, hours, minutes)| {
            let magnitude = hours * 60 + minutes.unwrap_or(0);
            match sign {
                '-' => -magnitude,
                _ => magnitude,
            }
        });
    zulu.or(numeric_offset)
}
/// Parses a date in `YYYY-MM-DD` form (the year may have any number of
/// digits), e.g. `2010-01-30`.
fn date<Input>() -> impl Parser<Input, Output = Date>
where
    Input: Stream<Token = char>,
{
    let year = many::<String, _, _>(digit());
    (year, char('-'), two_digits(), char('-'), two_digits()).map(|(year, _, month, _, day)| {
        Date {
            // Only digits were parsed, so this conversion cannot fail
            year: year.parse().unwrap(),
            month,
            day,
        }
    })
}
/// Parses a time of day with zone, e.g. `12:30:02Z`.
fn time<Input>() -> impl Parser<Input, Output = Time>
where
    Input: Stream<Token = char>,
{
    // ":NN" — a colon followed by a two-digit field
    let colon_field = || char(':').with(two_digits());
    (two_digits(), colon_field(), colon_field(), time_zone()).map(
        |(hour, minute, second, time_zone)| Time {
            hour,
            minute,
            second,
            time_zone,
        },
    )
}
/// Parses a date time according to ISO8601, e.g. `2015-08-02T18:54:42+02`.
fn date_time<Input>() -> impl Parser<Input, Output = DateTime>
where
    Input: Stream<Token = char>,
{
    date()
        .skip(char('T'))
        .and(time())
        .map(|(date, time)| DateTime { date, time })
}
#[test]
fn test() {
    // Offset form: "+02" means 120 minutes east of UTC
    let result = date_time().parse("2015-08-02T18:54:42+02");
    let d = DateTime {
        date: Date {
            year: 2015,
            month: 8,
            day: 2,
        },
        time: Time {
            hour: 18,
            minute: 54,
            second: 42,
            time_zone: 2 * 60,
        },
    };
    assert_eq!(result, Ok((d, "")));
    // Zulu form: 'Z' means UTC; years may have more than four digits
    let result = date_time().parse("50015-12-30T08:54:42Z");
    let d = DateTime {
        date: Date {
            year: 50015,
            month: 12,
            day: 30,
        },
        time: Time {
            hour: 8,
            minute: 54,
            second: 42,
            time_zone: 0,
        },
    };
    assert_eq!(result, Ok((d, "")));
}
fn main() {
let result = match env::args().nth(1) {
Some(file) => File::open(file).map_err(Error::Io).and_then(main_),
None => main_(io::stdin()),
};
match result {
Ok(_) => println!("OK"),
Err(err) => println!("{}", err),
}
}
#[cfg(feature = "std")]
/// Reads all of `read` and parses it as an ISO8601 date-time, reporting
/// errors with source positions.
fn main_<R>(mut read: R) -> Result<(), Error<easy::Errors<char, String, SourcePosition>>>
where
    R: Read,
{
    let mut text = String::new();
    read.read_to_string(&mut text).map_err(Error::Io)?;
    date_time()
        .easy_parse(position::Stream::new(&*text))
        // The error borrows `text`; convert borrowed ranges to owned strings
        .map_err(|err| Error::Parse(err.map_range(|s| s.to_string())))?;
    Ok(())
}
#[cfg(not(feature = "std"))]
/// Fallback used without `std`: parses with plain (positionless) errors.
fn main_<R>(mut read: R) -> Result<(), Error<::combine::error::StringStreamError>>
where
    R: Read,
{
    let mut text = String::new();
    read.read_to_string(&mut text).map_err(Error::Io)?;
    date_time()
        .parse(position::Stream::new(&*text))
        .map_err(Error::Parse)?;
    Ok(())
}

173
vendor/combine/examples/ini.rs vendored Normal file
View File

@@ -0,0 +1,173 @@
//! Parser example for INI files.
use std::{
collections::HashMap,
env, fmt,
fs::File,
io::{self, Read},
};
use combine::{parser::char::space, stream::position, *};
#[cfg(feature = "std")]
use combine::stream::easy;
#[cfg(feature = "std")]
use combine::stream::position::SourcePosition;
/// Either an I/O failure while reading the input or a parse failure.
enum Error<E> {
    Io(io::Error),
    Parse(E),
}
impl<E> fmt::Display for Error<E>
where
    E: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Error::Io(ref err) => write!(f, "{}", err),
            Error::Parse(ref err) => write!(f, "{}", err),
        }
    }
}
/// A parsed INI document: top-level properties plus named sections.
#[derive(PartialEq, Debug)]
pub struct Ini {
    pub global: HashMap<String, String>,
    pub sections: HashMap<String, HashMap<String, String>>,
}
/// Parses a single `key=value` line. Neither side may contain `;` (comments);
/// keys additionally exclude `=` and `[`.
fn property<Input>() -> impl Parser<Input, Output = (String, String)>
where
    Input: Stream<Token = char>,
{
    let key = many1(satisfy(|c| c != '=' && c != '[' && c != ';'));
    let value = many1(satisfy(|c| c != '\n' && c != ';'));
    key.skip(token('='))
        .and(value)
        .message("while parsing property")
}
/// Skips any mix of whitespace and `;` line comments.
fn whitespace<Input>() -> impl Parser<Input>
where
    Input: Stream<Token = char>,
{
    let comment = token(';').with(skip_many(satisfy(|c| c != '\n')));
    // Wrapping in `skip_many` lets runs of spaces and comments alternate
    skip_many(skip_many1(space()).or(comment))
}
/// Parses zero or more properties into a map, each consuming its trailing
/// whitespace/comments.
fn properties<Input>() -> impl Parser<Input, Output = HashMap<String, String>>
where
    Input: Stream<Token = char>,
{
    many((property(), whitespace()).map(|(pair, _)| pair))
}
/// Parses a `[name]` header followed by that section's properties.
fn section<Input>() -> impl Parser<Input, Output = (String, HashMap<String, String>)>
where
    Input: Stream<Token = char>,
{
    let name = between(token('['), token(']'), many(satisfy(|c| c != ']')));
    name.skip(whitespace())
        .and(properties())
        .message("while parsing section")
}
/// Parses a whole INI document: leading whitespace, global properties, then
/// any number of sections.
fn ini<Input>() -> impl Parser<Input, Output = Ini>
where
    Input: Stream<Token = char>,
{
    whitespace()
        .with(properties())
        .and(many(section()))
        .map(|(global, sections)| Ini { global, sections })
}
#[test]
fn ini_ok() {
    let text = r#"
language=rust
[section]
name=combine; Comment
type=LL(1)
"#;
    let mut expected = Ini {
        global: HashMap::new(),
        sections: HashMap::new(),
    };
    expected
        .global
        .insert(String::from("language"), String::from("rust"));
    let mut section = HashMap::new();
    // "; Comment" is stripped: the value parser stops at ';'
    section.insert(String::from("name"), String::from("combine"));
    section.insert(String::from("type"), String::from("LL(1)"));
    expected.sections.insert(String::from("section"), section);
    let result = ini().parse(text).map(|t| t.0);
    assert_eq!(result, Ok(expected));
}
#[cfg(feature = "std")]
#[test]
fn ini_error() {
    let text = "[error";
    let result = ini().easy_parse(position::Stream::new(text)).map(|t| t.0);
    // The section name consumes to end of input, so the missing ']' is
    // reported at column 7 with the "while parsing section" context attached
    assert_eq!(
        result,
        Err(easy::Errors {
            position: SourcePosition { line: 1, column: 7 },
            errors: vec![
                easy::Error::end_of_input(),
                easy::Error::Expected(']'.into()),
                easy::Error::Message("while parsing section".into()),
            ],
        })
    );
}
fn main() {
let result = match env::args().nth(1) {
Some(file) => File::open(file).map_err(Error::Io).and_then(main_),
None => main_(io::stdin()),
};
match result {
Ok(_) => println!("OK"),
Err(err) => println!("{}", err),
}
}
#[cfg(feature = "std")]
/// Reads all of `read` and parses it as an INI document, reporting errors
/// with source positions.
fn main_<R>(mut read: R) -> Result<(), Error<easy::Errors<char, String, SourcePosition>>>
where
    R: Read,
{
    let mut text = String::new();
    read.read_to_string(&mut text).map_err(Error::Io)?;
    ini()
        .easy_parse(position::Stream::new(&*text))
        // The error borrows `text`; convert borrowed ranges to owned strings
        .map_err(|err| Error::Parse(err.map_range(|s| s.to_string())))?;
    Ok(())
}
#[cfg(not(feature = "std"))]
/// Fallback used without `std`: parses with plain (positionless) errors.
fn main_<R>(mut read: R) -> Result<(), Error<::combine::error::StringStreamError>>
where
    R: Read,
{
    let mut text = String::new();
    read.read_to_string(&mut text).map_err(Error::Io)?;
    ini()
        .parse(position::Stream::new(&*text))
        .map_err(Error::Parse)?;
    Ok(())
}

33
vendor/combine/examples/number.rs vendored Normal file
View File

@@ -0,0 +1,33 @@
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
use core::str;
#[cfg(feature = "std")]
use std::str;
use combine::{
error::UnexpectedParse,
parser::{
byte::digit,
choice::optional,
range::recognize,
repeat::{skip_many, skip_many1},
token::token,
},
Parser,
};
fn main() {
    // Recognizes `digits[.digits]` and converts the matched bytes to an `f64`
    let mut parser = recognize((
        skip_many1(digit()),
        optional((token(b'.'), skip_many(digit()))),
    ))
    .and_then(|bs: &[u8]| {
        // SAFETY: `bs` only contains digits which are ascii and thus UTF-8
        let s = unsafe { str::from_utf8_unchecked(bs) };
        s.parse::<f64>().map_err(|_| UnexpectedParse::Unexpected)
    });
    let result = parser.parse(&b"123.45"[..]);
    assert_eq!(result, Ok((123.45, &b""[..])));
}

18
vendor/combine/examples/readme.rs vendored Normal file
View File

@@ -0,0 +1,18 @@
use combine::{
many1,
parser::char::{letter, space},
sep_by, Parser,
};
// Keep the README example compiling and passing as part of the test suite
#[test]
fn readme() {
    main();
}
/// README example: split the input into space-separated words and keep the
/// last one, leaving unparsed input ("!") in the remainder.
fn main() {
    // A word is a run of one or more letters
    let word = many1(letter());
    let last_word = sep_by(word, space()).map(|mut words: Vec<String>| words.pop());
    let mut parser = last_word;
    let result = parser.parse("Pick up that word!");
    assert_eq!(result, Ok((Some("word".to_string()), "!")));
}

12
vendor/combine/release.sh vendored Executable file
View File

@@ -0,0 +1,12 @@
#!/bin/bash
# Cut a release: regenerate the changelog with clog, commit it, then run
# cargo-release with the requested bump level (patch, minor or major).
VERSION=$1
# A bump level is required
if [ -z "$VERSION" ]; then
    echo "Expected patch, minor or major"
    exit 1
fi
# Quote expansions so an accidental multi-word argument cannot word-split
clog "--$VERSION" && \
git add CHANGELOG.md && \
git commit -m "Updated changelog" && \
cargo release --execute "$VERSION"

1
vendor/combine/rustfmt.toml vendored Normal file
View File

@@ -0,0 +1 @@
edition = "2018"

1100
vendor/combine/src/error.rs vendored Normal file

File diff suppressed because it is too large Load Diff

29
vendor/combine/src/future_ext.rs vendored Normal file
View File

@@ -0,0 +1,29 @@
use crate::lib::future::Future;
use crate::lib::marker::Unpin;
use crate::lib::pin::Pin;
use crate::lib::task::{Context, Poll};
// Replace usage of this with std::future::poll_fn once it stabilizes
/// Future that delegates each `poll` to the wrapped closure.
pub struct PollFn<F> {
    f: F,
}
// `poll` only takes `&mut self.f` and never creates a pin to it, so `PollFn`
// can be `Unpin` regardless of `F`.
impl<F> Unpin for PollFn<F> {}
/// Creates a future that resolves once `f` returns `Poll::Ready`.
pub fn poll_fn<T, F>(f: F) -> PollFn<F>
where
    F: FnMut(&mut Context<'_>) -> Poll<T>,
{
    PollFn { f }
}
impl<T, F> Future for PollFn<F>
where
    F: FnMut(&mut Context<'_>) -> Poll<T>,
{
    type Output = T;
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {
        // `PollFn: Unpin`, so mutable access through the pin is fine here
        (&mut self.f)(cx)
    }
}

1006
vendor/combine/src/lib.rs vendored Normal file

File diff suppressed because it is too large Load Diff

661
vendor/combine/src/parser/byte.rs vendored Normal file
View File

@@ -0,0 +1,661 @@
//! Module containing parsers specialized on byte streams.
use crate::{
error::{self, ParseResult::*},
parser::{
combinator::no_partial,
range::{take_fn, TakeRange},
repeat::skip_many,
token::{satisfy, token, tokens_cmp, Token},
},
stream::{RangeStream, Stream},
Parser,
};
/// Parses a byte and succeeds if the byte is equal to `c`.
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::byte;
/// assert_eq!(byte(b'!').parse(&b"!"[..]), Ok((b'!', &b""[..])));
/// assert!(byte(b'A').parse(&b""[..]).is_err());
/// assert!(byte(b'A').parse(&b"!"[..]).is_err());
/// ```
pub fn byte<Input>(c: u8) -> Token<Input>
where
    Input: Stream<Token = u8>,
{
    // Matching a single byte is exactly the generic `token` parser
    token(c)
}
// Generates a byte-class parser: `satisfy` with the given `u8` predicate
// method, labelled via `stringify!` with the parser's name.
macro_rules! byte_parser {
    ($name:ident, $ty:ident, $f: ident) => {{
        satisfy(|c: u8| c.$f())
            .expected(stringify!($name))
    }};
    // Variant forwarding extra tokens (e.g. a trailing `()` or arguments)
    ($name:ident, $ty:ident, $f: ident $($args:tt)+) => {{
        satisfy(|c: u8| c.$f $($args)+)
            .expected(stringify!($name))
    }};
}
/// Parses a base-10 digit (09).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::digit;
/// assert_eq!(digit().parse(&b"9"[..]), Ok((b'9', &b""[..])));
/// assert!(digit().parse(&b"A"[..]).is_err());
/// ```
pub fn digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(digit, Digit, is_ascii_digit())
}
/// Parses a `b' '`, `b'\t'`, `b'\n'` or `'b\'r'`.
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::space;
/// assert_eq!(space().parse(&b" "[..]), Ok((b' ', &b""[..])));
/// assert_eq!(space().parse(&b" "[..]), Ok((b' ', &b" "[..])));
/// assert!(space().parse(&b"!"[..]).is_err());
/// assert!(space().parse(&b""[..]).is_err());
/// ```
pub fn space<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(space, Space, is_ascii_whitespace)
}
/// Skips over [`space`] zero or more times
///
/// [`space`]: fn.space.html
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::spaces;
/// assert_eq!(spaces().parse(&b""[..]), Ok(((), &b""[..])));
/// assert_eq!(spaces().parse(&b" "[..]), Ok(((), &b""[..])));
/// ```
pub fn spaces<Input>() -> impl Parser<Input, Output = ()>
where
    Input: Stream<Token = u8>,
{
    // Zero or more spaces, reported as a single "whitespaces" expectation
    skip_many(space()).expected("whitespaces")
}
/// Parses a newline byte (`b'\n'`).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::newline;
/// assert_eq!(newline().parse(&b"\n"[..]), Ok((b'\n', &b""[..])));
/// assert!(newline().parse(&b"\r"[..]).is_err());
/// ```
pub fn newline<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
    Input: Stream<Token = u8>,
{
    satisfy(|byte: u8| byte == b'\n').expected("lf newline")
}
/// Parses carriage return and newline (`&b"\r\n"`), returning the newline byte.
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::crlf;
/// assert_eq!(crlf().parse(&b"\r\n"[..]), Ok((b'\n', &b""[..])));
/// assert!(crlf().parse(&b"\r"[..]).is_err());
/// assert!(crlf().parse(&b"\n"[..]).is_err());
/// ```
pub fn crlf<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
    Input: Stream<Token = u8>,
{
    // `no_partial`: the two-byte sequence is parsed as an atomic unit
    no_partial(satisfy(|byte: u8| byte == b'\r').with(newline())).expected("crlf newline")
}
/// Parses a tab byte (`b'\t'`).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::tab;
/// assert_eq!(tab().parse(&b"\t"[..]), Ok((b'\t', &b""[..])));
/// assert!(tab().parse(&b" "[..]).is_err());
/// ```
pub fn tab<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
    Input: Stream<Token = u8>,
{
    satisfy(|byte| byte == b'\t').expected("tab")
}
/// Parses an uppercase ASCII letter (AZ).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::upper;
/// assert_eq!(upper().parse(&b"A"[..]), Ok((b'A', &b""[..])));
/// assert!(upper().parse(&b"a"[..]).is_err());
/// ```
pub fn upper<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(upper, Upper, is_ascii_uppercase)
}
/// Parses an lowercase ASCII letter (az).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::lower;
/// assert_eq!(lower().parse(&b"a"[..]), Ok((b'a', &b""[..])));
/// assert!(lower().parse(&b"A"[..]).is_err());
/// ```
pub fn lower<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(lower, Lower, is_ascii_lowercase)
}
/// Parses either an ASCII alphabet letter or digit (az, AZ, 09).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::alpha_num;
/// assert_eq!(alpha_num().parse(&b"A"[..]), Ok((b'A', &b""[..])));
/// assert_eq!(alpha_num().parse(&b"1"[..]), Ok((b'1', &b""[..])));
/// assert!(alpha_num().parse(&b"!"[..]).is_err());
/// ```
pub fn alpha_num<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(alpha_num, AlphaNum, is_ascii_alphanumeric)
}
/// Parses an ASCII alphabet letter (az, AZ).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::letter;
/// assert_eq!(letter().parse(&b"a"[..]), Ok((b'a', &b""[..])));
/// assert_eq!(letter().parse(&b"A"[..]), Ok((b'A', &b""[..])));
/// assert!(letter().parse(&b"9"[..]).is_err());
/// ```
pub fn letter<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(letter, Letter, is_ascii_alphabetic)
}
/// Parses an octal digit.
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::oct_digit;
/// assert_eq!(oct_digit().parse(&b"7"[..]), Ok((b'7', &b""[..])));
/// assert!(oct_digit().parse(&b"8"[..]).is_err());
/// ```
pub fn oct_digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
    Input: Stream<Token = u8>,
{
    satisfy(|byte| (b'0'..=b'7').contains(&byte)).expected("octal digit")
}
/// Parses an ASCII hexdecimal digit (accepts both uppercase and lowercase).
///
/// ```
/// use combine::Parser;
/// use combine::parser::byte::hex_digit;
/// assert_eq!(hex_digit().parse(&b"F"[..]), Ok((b'F', &b""[..])));
/// assert!(hex_digit().parse(&b"H"[..]).is_err());
/// ```
pub fn hex_digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
where
Input: Stream<Token = u8>,
{
byte_parser!(hex_digit, HexDigit, is_ascii_hexdigit())
}
parser! {
/// Parses the bytes `s`.
///
/// If you have a stream implementing [`RangeStream`] such as `&[u8]` you can also use the
/// [`range`] parser which may be more efficient.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::byte::bytes;
/// # fn main() {
/// let result = bytes(&b"rust"[..])
/// .parse(&b"rust"[..])
/// .map(|x| x.0);
/// assert_eq!(result, Ok(&b"rust"[..]));
/// # }
/// ```
///
/// [`RangeStream`]: super::super::stream::RangeStream
/// [`range`]: super::range::range
pub fn bytes['a, 'b, Input](s: &'static [u8])(Input) -> &'a [u8]
where [
Input: Stream<Token = u8, Range = &'b [u8]>,
]
{
bytes_cmp(s, |l: u8, r: u8| l == r)
}
}
parser! {
/// Parses the bytes `s` using `cmp` to compare each token.
///
/// If you have a stream implementing [`RangeStream`] such as `&[u8]` you can also use the
/// [`range`] parser which may be more efficient.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::byte::bytes_cmp;
/// # use combine::stream::easy::Info;
/// # fn main() {
/// let result = bytes_cmp(&b"abc"[..], |l, r| l.eq_ignore_ascii_case(&r))
/// .parse(&b"AbC"[..]);
/// assert_eq!(result, Ok((&b"abc"[..], &b""[..])));
/// # }
/// ```
///
/// [`RangeStream`]: super::super::stream::RangeStream
/// [`range`]: super::range::range
pub fn bytes_cmp['a, 'b, C, Input](s: &'static [u8], cmp: C)(Input) -> &'a [u8]
where [
C: FnMut(u8, u8) -> bool,
Input: Stream<Token = u8, Range = &'b [u8]>,
]
{
let s = *s;
tokens_cmp(s.iter().cloned(), cmp)
.map(move |_| s)
.expected(error::Range(s))
}
}
macro_rules! take_until {
(
$(#[$attr:meta])*
$type_name: ident, $func_name: ident, $memchr: ident, $($param: ident),+
) => {
parser!{
#[derive(Clone)]
pub struct $type_name;
type PartialState = usize;
$(#[$attr])*
pub fn $func_name[Input]($($param : u8),*)(Input) -> Input::Range
where [
Input: RangeStream,
Input::Range: AsRef<[u8]> + crate::stream::Range,
]
{
take_fn(move |haystack: Input::Range| {
let haystack = haystack.as_ref();
match ::memchr::$memchr( $(*$param),+ , haystack) {
Some(i) => TakeRange::Found(i),
None => TakeRange::NotFound(haystack.len()),
}
})
}
}
}
}
take_until! {
/// Zero-copy parser which reads a range of 0 or more tokens until `a` is found.
///
/// If `a` is not found, the parser will return an error.
///
/// ```
/// # extern crate combine;
/// # use combine::parser::byte::take_until_byte;
/// # use combine::*;
/// # fn main() {
/// let mut parser = take_until_byte(b'\r');
/// let result = parser.parse("To: user@example.com\r\n");
/// assert_eq!(result, Ok(("To: user@example.com", "\r\n")));
/// let result = parser.parse("Hello, world\n");
/// assert!(result.is_err());
/// # }
/// ```
TakeUntilByte, take_until_byte, memchr, a
}
take_until! {
/// Zero-copy parser which reads a range of 0 or more tokens until `a` or `b` is found.
///
/// If `a` or `b` is not found, the parser will return an error.
///
/// ```
/// # extern crate combine;
/// # use combine::parser::byte::take_until_byte2;
/// # use combine::*;
/// # fn main() {
/// let mut parser = take_until_byte2(b'\r', b'\n');
/// let result = parser.parse("To: user@example.com\r\n");
/// assert_eq!(result, Ok(("To: user@example.com", "\r\n")));
/// let result = parser.parse("Hello, world\n");
/// assert_eq!(result, Ok(("Hello, world", "\n")));
/// # }
/// ```
TakeUntilByte2, take_until_byte2, memchr2, a, b
}
take_until! {
/// Zero-copy parser which reads a range of 0 or more tokens until `a`, 'b' or `c` is found.
///
/// If `a`, 'b' or `c` is not found, the parser will return an error.
///
/// ```
/// # extern crate combine;
/// # use combine::parser::byte::take_until_byte3;
/// # use combine::*;
/// # fn main() {
/// let mut parser = take_until_byte3(b'\r', b'\n', b' ');
/// let result = parser.parse("To: user@example.com\r\n");
/// assert_eq!(result, Ok(("To:", " user@example.com\r\n")));
/// let result = parser.parse("Helloworld");
/// assert!(result.is_err());
/// # }
/// ```
TakeUntilByte3, take_until_byte3, memchr3, a, b, c
}
parser! {
    type PartialState = usize;
    /// Zero-copy parser which reads a range of 0 or more tokens until `needle` is found.
    ///
    /// If `needle` is not found, the parser will return an error.
    ///
    /// Optimized variant of [`take_until_range`](../range/fn.take_until_range.html)
    ///
    /// ```
    /// use combine::*;
    /// use combine::parser::byte::take_until_bytes;
    /// assert_eq!(
    ///     take_until_bytes(&b"\r\n"[..]).easy_parse(&b"abc\r\n"[..]).map(|(x, _)| x),
    ///     Ok((&b"abc"[..]))
    /// );
    /// // Also works on strings as long as `needle` is UTF-8
    /// assert_eq!(
    ///     take_until_bytes("\r\n".as_bytes()).easy_parse("abc\r\n").map(|(x, _)| x),
    ///     Ok(("abc"))
    /// );
    /// ```
    pub fn take_until_bytes['a, Input](needle: &'a [u8])(Input) -> Input::Range
    where [
        Input: RangeStream,
        Input::Range: AsRef<[u8]> + crate::stream::Range,
    ]
    {
        take_fn(move |haystack: Input::Range| {
            let haystack = haystack.as_ref();
            match memslice(needle, haystack) {
                Some(i) => TakeRange::Found(i),
                // On a miss, keep the last `needle.len() - 1` bytes uncommitted:
                // they could be the start of a match once more input arrives
                None => TakeRange::NotFound(haystack.len().saturating_sub(needle.len() - 1)),
            }
        })
    }
}
/// Returns the byte offset of the first occurrence of `needle` in `haystack`,
/// or `None` if it does not occur. An empty `needle` matches at offset 0.
fn memslice(needle: &[u8], haystack: &[u8]) -> Option<usize> {
    let (&first, rest) = match needle.split_first() {
        Some(split) => split,
        // An empty needle matches immediately
        None => return Some(0),
    };
    // Let memchr locate candidates for the first byte, then verify the
    // remainder of the needle at each candidate position
    memchr::memchr_iter(first, haystack).find(|&pos| haystack[pos + 1..].starts_with(rest))
}
/// Parsers for decoding numbers in big-endian or little-endian order.
pub mod num {
use crate::{error::ResultExt, lib::mem::size_of, parser::function::parser, stream::uncons};
use super::*;
macro_rules! integer_parser {
(
$(#[$attr:meta])*
pub $type_name: ident,
$output_type: ident, $be_name: ident, $le_name: ident, $read_name: ident
) => {
$(#[$attr])*
pub fn $be_name<'a, Input>() -> impl Parser<Input, Output = $output_type, PartialState = ()>
where
Input: Stream<Token = u8>,
{
parser(|input: &mut Input| {
let checkpoint = input.checkpoint();
let result = (|input: &mut Input| {
let mut buffer = [0u8; size_of::<$output_type>()];
for elem in &mut buffer[..] {
*elem = ctry!(uncons(input)).0;
}
CommitOk($output_type::from_be_bytes(buffer))
})(input);
if result.is_err() {
input.reset(checkpoint).committed().into_result()?;
}
result.into_result()
})
}
$(#[$attr])*
pub fn $le_name<'a, Input>() -> impl Parser<Input, Output = $output_type, PartialState = ()>
where
Input: Stream<Token = u8>,
{
parser(|input: &mut Input| {
let checkpoint = input.checkpoint();
let result = (|input: &mut Input| {
let mut buffer = [0u8; size_of::<$output_type>()];
for elem in &mut buffer[..] {
*elem = ctry!(uncons(input)).0;
}
CommitOk($output_type::from_le_bytes(buffer))
})(input);
if result.is_err() {
input.reset(checkpoint).committed().into_result()?;
}
result.into_result()
})
}
}
}
integer_parser!(
    /// Reads a u16 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_u16;
    ///
    /// assert_eq!(le_u16().parse(&b"\x01\0"[..]), Ok((1, &b""[..])));
    /// assert!(le_u16().parse(&b"\0"[..]).is_err());
    /// ```
    pub U16, u16, be_u16, le_u16, read_u16
);
integer_parser!(
    /// Reads a u32 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_u32;
    ///
    /// assert_eq!(le_u32().parse(&b"\x01\0\0\0"[..]), Ok((1, &b""[..])));
    /// assert!(le_u32().parse(&b"\x01\0\0"[..]).is_err());
    /// ```
    pub U32, u32, be_u32, le_u32, read_u32
);
integer_parser!(
    /// Reads a u64 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_u64;
    ///
    /// assert_eq!(le_u64().parse(&b"\x01\0\0\0\0\0\0\0"[..]), Ok((1, &b""[..])));
    /// assert!(le_u64().parse(&b"\x01\0\0\0\0\0\0"[..]).is_err());
    /// ```
    pub U64, u64, be_u64, le_u64, read_u64
);
integer_parser!(
    /// Reads an i16 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_i16;
    ///
    /// assert_eq!(le_i16().parse(&b"\x01\0"[..]), Ok((1, &b""[..])));
    /// assert!(le_i16().parse(&b"\x01"[..]).is_err());
    /// ```
    pub I16, i16, be_i16, le_i16, read_i16
);
integer_parser!(
    /// Reads an i32 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_i32;
    ///
    /// assert_eq!(le_i32().parse(&b"\x01\0\0\0"[..]), Ok((1, &b""[..])));
    /// assert!(le_i32().parse(&b"\x01\0\0"[..]).is_err());
    /// ```
    pub I32, i32, be_i32, le_i32, read_i32
);
integer_parser!(
    /// Reads an i64 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_i64;
    ///
    /// assert_eq!(le_i64().parse(&b"\x01\0\0\0\0\0\0\0"[..]), Ok((1, &b""[..])));
    /// assert!(le_i64().parse(&b"\x01\0\0\0\0\0\0"[..]).is_err());
    /// ```
    pub I64, i64, be_i64, le_i64, read_i64
);
integer_parser!(
    /// Reads an f32 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_f32;
    ///
    /// let buf = 123.45f32.to_le_bytes();
    /// assert_eq!(le_f32().parse(&buf[..]), Ok((123.45, &b""[..])));
    /// assert!(le_f32().parse(&b"\x01\0\0"[..]).is_err());
    /// ```
    pub F32, f32, be_f32, le_f32, read_f32
);
integer_parser!(
    /// Reads an f64 out of the byte stream with the specified endianness
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::byte::num::le_f64;
    ///
    /// let buf = 123.45f64.to_le_bytes();
    /// assert_eq!(le_f64().parse(&buf[..]), Ok((123.45, &b""[..])));
    /// assert!(le_f64().parse(&b"\x01\0\0\0\0\0\0"[..]).is_err());
    /// ```
    pub F64, f64, be_f64, le_f64, read_f64
);
#[cfg(all(feature = "std", test))]
mod tests {

    use crate::stream::{buffered, position, IteratorStream};

    use super::*;

    // Regression test: the numeric parsers must also work on streams that are
    // not `RangeStream`s — here an `IteratorStream` wrapped in a buffered
    // stream with a 1-token lookahead.
    #[test]
    fn no_rangestream() {
        let buf = 123.45f64.to_le_bytes();
        assert_eq!(
            le_f64()
                .parse(buffered::Stream::new(
                    position::Stream::new(IteratorStream::new(buf.iter().cloned())),
                    1
                ))
                .map(|(t, _)| t),
            Ok(123.45)
        );
        // NOTE(review): this assertion is byte-for-byte identical to the one
        // above — presumably to verify parsing works a second time from a
        // fresh stream; confirm the duplication is intentional.
        assert_eq!(
            le_f64()
                .parse(buffered::Stream::new(
                    position::Stream::new(IteratorStream::new(buf.iter().cloned())),
                    1
                ))
                .map(|(t, _)| t),
            Ok(123.45)
        );
        // Same value through the big-endian parser.
        let buf = 123.45f64.to_be_bytes();
        assert_eq!(
            be_f64()
                .parse(buffered::Stream::new(
                    position::Stream::new(IteratorStream::new(buf.iter().cloned())),
                    1
                ))
                .map(|(t, _)| t),
            Ok(123.45)
        );
    }
}
}
#[cfg(all(feature = "std", test))]
mod tests {

    use crate::stream::{buffered, position, read};

    use super::*;

    // Checks `memslice` substring search over a table of
    // (needle, haystack, expected first-match offset) cases.
    #[test]
    fn memslice_basic() {
        let cases: &[(&[u8], &[u8], Option<usize>)] = &[
            (b"", b"abc123", Some(0)),
            (b"a", b"abc123", Some(0)),
            (b"ab", b"abc123", Some(0)),
            (b"c12", b"abc123", Some(2)),
            (b"abc", b"abcab2", Some(0)),
            (b"ab2", b"abcab2", Some(3)),
            (b"aaaa", b"aaabaaaa", Some(4)),
        ];
        for &(needle, haystack, expected) in cases {
            assert_eq!(memslice(needle, haystack), expected);
        }
    }

    // `bytes` must succeed on a non-range (read-based) stream as well.
    #[test]
    fn bytes_read_stream() {
        let stream = buffered::Stream::new(
            position::Stream::new(read::Stream::new("abc".as_bytes())),
            1,
        );
        assert!(bytes(b"abc").parse(stream).is_ok());
    }
}

319
vendor/combine/src/parser/char.rs vendored Normal file
View File

@@ -0,0 +1,319 @@
//! Module containing parsers specialized on character streams.
use crate::{
parser::{
combinator::no_partial,
repeat::skip_many,
token::{satisfy, token, tokens_cmp, Token},
},
stream::Stream,
Parser,
};
/// Parses a character and succeeds if the character is equal to `c`.
///
/// This is simply [`token`] restricted to streams whose token type is `char`.
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::char;
/// assert_eq!(char('!').parse("!"), Ok(('!', "")));
/// assert!(char('A').parse("!").is_err());
/// ```
pub fn char<Input>(c: char) -> Token<Input>
where
    Input: Stream<Token = char>,
{
    token(c)
}
// `parser!` generates a named, zero-sized parser type (`Digit`) so the
// returned parser can be referred to by name instead of an opaque type.
parser! {
    #[derive(Copy, Clone)]
    pub struct Digit;

    /// Parses a base-10 digit.
    ///
    /// ```
    /// use combine::Parser;
    /// use combine::parser::char::digit;
    /// assert_eq!(digit().parse("9"), Ok(('9', "")));
    /// assert!(digit().parse("A").is_err());
    /// ```
    pub fn digit[Input]()(Input) -> char
    where
        [Input: Stream<Token = char>,]
    {
        satisfy(|c: char| c.is_digit(10)).expected("digit")
    }
}
/// Recognises exactly one whitespace character, as classified by
/// [`std::char::is_whitespace`] (spaces, tabs, newlines, …).
///
/// [`std::char::is_whitespace`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_whitespace
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::space;
/// assert_eq!(space().parse(" "), Ok((' ', "")));
/// assert_eq!(space().parse("  "), Ok((' ', " ")));
/// assert!(space().parse("!").is_err());
/// assert!(space().parse("").is_err());
/// ```
pub fn space<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(|c: char| c.is_whitespace()).expected("whitespace")
}
/// Consumes and discards zero or more whitespace characters, as classified by
/// [`std::char::is_whitespace`] (spaces, tabs, newlines, …).
///
/// [`std::char::is_whitespace`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_whitespace
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::spaces;
/// assert_eq!(spaces().parse(""), Ok(((), "")));
/// assert_eq!(spaces().parse("   "), Ok(((), "")));
/// ```
pub fn spaces<Input>() -> impl Parser<Input, Output = ()>
where
    Input: Stream<Token = char>,
{
    let one_space = space();
    skip_many(one_space).expected("whitespaces")
}
/// Recognises a line-feed character (`'\n'`).
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::newline;
/// assert_eq!(newline().parse("\n"), Ok(('\n', "")));
/// assert!(newline().parse("\r").is_err());
/// ```
pub fn newline<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(|c: char| matches!(c, '\n')).expected("lf newline")
}
/// Recognises a carriage return followed by a line feed (`"\r\n"`), yielding
/// the newline character.
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::crlf;
/// assert_eq!(crlf().parse("\r\n"), Ok(('\n', "")));
/// assert!(crlf().parse("\r").is_err());
/// assert!(crlf().parse("\n").is_err());
/// ```
pub fn crlf<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    // Match the '\r', then delegate the '\n' to `newline`.
    no_partial(satisfy(|c: char| matches!(c, '\r')).with(newline())).expected("crlf newline")
}
/// Recognises a horizontal tab character (`'\t'`).
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::tab;
/// assert_eq!(tab().parse("\t"), Ok(('\t', "")));
/// assert!(tab().parse(" ").is_err());
/// ```
pub fn tab<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(|c: char| matches!(c, '\t')).expected("tab")
}
/// Recognises one uppercase letter, as classified by
/// [`std::char::is_uppercase`].
///
/// [`std::char::is_uppercase`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_uppercase
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::upper;
/// assert_eq!(upper().parse("A"), Ok(('A', "")));
/// assert!(upper().parse("a").is_err());
/// ```
pub fn upper<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(char::is_uppercase).expected("uppercase letter")
}
/// Recognises one lowercase letter, as classified by
/// [`std::char::is_lowercase`].
///
/// [`std::char::is_lowercase`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_lowercase
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::lower;
/// assert_eq!(lower().parse("a"), Ok(('a', "")));
/// assert!(lower().parse("A").is_err());
/// ```
pub fn lower<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(char::is_lowercase).expected("lowercase letter")
}
/// Recognises one letter or digit, as classified by
/// [`std::char::is_alphanumeric`].
///
/// [`std::char::is_alphanumeric`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_alphanumeric
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::alpha_num;
/// assert_eq!(alpha_num().parse("A"), Ok(('A', "")));
/// assert_eq!(alpha_num().parse("1"), Ok(('1', "")));
/// assert!(alpha_num().parse("!").is_err());
/// ```
pub fn alpha_num<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(char::is_alphanumeric).expected("letter or digit")
}
/// Recognises one alphabetic letter, as classified by
/// [`std::char::is_alphabetic`].
///
/// [`std::char::is_alphabetic`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_alphabetic
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::letter;
/// assert_eq!(letter().parse("a"), Ok(('a', "")));
/// assert_eq!(letter().parse("A"), Ok(('A', "")));
/// assert!(letter().parse("9").is_err());
/// ```
pub fn letter<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(char::is_alphabetic).expected("letter")
}
/// Recognises one octal digit (`0`–`7`).
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::oct_digit;
/// assert_eq!(oct_digit().parse("7"), Ok(('7', "")));
/// assert!(oct_digit().parse("8").is_err());
/// ```
pub fn oct_digit<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(|d: char| d.is_digit(8)).expected("octal digit")
}
/// Recognises one hexadecimal digit, accepting both uppercase and lowercase
/// letters.
///
/// ```
/// use combine::Parser;
/// use combine::parser::char::hex_digit;
/// assert_eq!(hex_digit().parse("F"), Ok(('F', "")));
/// assert!(hex_digit().parse("H").is_err());
/// ```
pub fn hex_digit<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
where
    Input: Stream<Token = char>,
{
    satisfy(|d: char| d.is_digit(16)).expected("hexadecimal digit")
}
/// Parses the string `s`.
///
/// Delegates to [`string_cmp`] using plain `==` on each character.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::char::string;
/// # fn main() {
/// let result = string("rust")
///     .parse("rust")
///     .map(|x| x.0);
/// assert_eq!(result, Ok("rust"));
/// # }
/// ```
pub fn string<'a, Input>(s: &'static str) -> impl Parser<Input, Output = &'a str>
where
    Input: Stream<Token = char>,
{
    string_cmp(s, |l, r| l == r)
}
/// Parses the string `s`, using `cmp` to compare each character.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::char::string_cmp;
/// # fn main() {
/// let result = string_cmp("rust", |l, r| l.eq_ignore_ascii_case(&r))
///     .parse("RusT")
///     .map(|x| x.0);
/// assert_eq!(result, Ok("rust"));
/// # }
/// ```
pub fn string_cmp<'a, C, Input>(s: &'static str, cmp: C) -> impl Parser<Input, Output = &'a str>
where
    C: FnMut(char, char) -> bool,
    Input: Stream<Token = char>,
{
    // On success the output is always the expected string `s` itself (not the
    // matched input, which may differ under a custom `cmp`); `s` also serves
    // as the "expected" label in error messages.
    tokens_cmp(s.chars(), cmp).map(move |_| s).expected(s)
}
#[cfg(all(feature = "std", test))]
mod tests {

    use crate::{
        parser::EasyParser,
        stream::{
            easy::{Error, Errors},
            position::{self, SourcePosition},
        },
    };

    use super::*;

    // `space` on empty input must report both end-of-input and the expected
    // "whitespace" label.
    #[test]
    fn space_error() {
        let result = space().easy_parse("");
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().errors,
            vec![Error::end_of_input(), Error::Expected("whitespace".into())]
        );
    }

    // A failing `string` reports its error at the position where matching
    // started (column 1).
    #[test]
    fn string_committed() {
        let result = string("a").easy_parse(position::Stream::new("b"));
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().position,
            SourcePosition { line: 1, column: 1 }
        );
    }

    // The error lists the unexpected token found and the full expected string.
    #[test]
    fn string_error() {
        let result = string("abc").easy_parse(position::Stream::new("bc"));
        assert_eq!(
            result,
            Err(Errors {
                position: SourcePosition { line: 1, column: 1 },
                errors: vec![Error::Unexpected('b'.into()), Error::Expected("abc".into())],
            })
        );
    }
}

849
vendor/combine/src/parser/choice.rs vendored Normal file
View File

@@ -0,0 +1,849 @@
//! Combinators which take one or more parsers and attempts to parse successfully with at least one
//! of them.
use crate::{
error::{
ParseError,
ParseResult::{self, *},
ResultExt, StreamError, Tracked,
},
parser::ParseMode,
ErrorOffset, Parser, Stream, StreamOnce,
};
/// Takes a number of parsers and tries to apply them each in order.
/// Fails if all the parsers fails or if an applied parser fails after it has committed to its
/// parse.
///
/// ```
/// # #[macro_use]
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::char::{digit, letter, string};
/// # use combine::stream::easy::Error;
/// # fn main() {
/// let mut parser = choice!(
///     many1(digit()),
///     string("let").map(|s| s.to_string()),
///     many1(letter()));
/// assert_eq!(parser.parse("let"), Ok(("let".to_string(), "")));
/// assert_eq!(parser.parse("123abc"), Ok(("123".to_string(), "abc")));
/// assert!(parser.parse(":123").is_err());
/// # }
/// ```
#[macro_export]
macro_rules! choice {
    // A single parser: return it unchanged.
    ($first : expr) => {
        $first
    };
    // Two or more parsers: fold into a right-nested chain of `or` calls.
    ($first : expr, $($rest : expr),+) => {
        $first.or(choice!($($rest),+))
    }
}
// Expands to `parse_partial` and `parse_first` method bodies which both
// forward to `parse_mode_choice` with the matching `ParseMode` value, so a
// `ChoiceParser` impl only has to write the mode-generic method once.
#[macro_export]
#[doc(hidden)]
macro_rules! parse_mode_choice {
    (Input) => {
        fn parse_partial(
            &mut self,
            input: &mut Input,
            state: &mut Self::PartialState,
        ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
            self.parse_mode_choice($crate::parser::PartialMode::default(), input, state)
        }

        fn parse_first(
            &mut self,
            input: &mut Input,
            state: &mut Self::PartialState,
        ) -> ParseResult<Self::Output, Input::Error> {
            self.parse_mode_choice($crate::parser::FirstMode, input, state)
        }
    };
}
/// `ChoiceParser` represents a parser which may parse one of several different choices depending
/// on the input.
///
/// This is an internal trait used to overload the `choice` function.
pub trait ChoiceParser<Input: Stream> {
    type Output;
    type PartialState: Default;

    /// Parses from the beginning, with no suspended partial parse to resume.
    fn parse_first(
        &mut self,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>;

    /// Resumes a parse previously suspended in `state` (partial parsing).
    fn parse_partial(
        &mut self,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>;

    /// Mode-generic entry point; dispatches to first or partial parsing
    /// depending on `mode`.
    fn parse_mode_choice<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
        Self: Sized;

    /// Collects the expected-errors of every alternative into `error`.
    fn add_error_choice(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>);
}
// Forwarding impl: a mutable reference to a `ChoiceParser` is itself a
// `ChoiceParser`, delegating every method to the pointee.
impl<'a, Input, P> ChoiceParser<Input> for &'a mut P
where
    Input: Stream,
    P: ?Sized + ChoiceParser<Input>,
{
    type Output = P::Output;
    type PartialState = P::PartialState;

    parse_mode_choice!(Input);

    #[inline]
    fn parse_mode_choice<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        // Dispatch on the mode explicitly since `P` may be unsized and its
        // own `parse_mode_choice` requires `Self: Sized`.
        if mode.is_first() {
            (**self).parse_first(input, state)
        } else {
            (**self).parse_partial(input, state)
        }
    }

    fn add_error_choice(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        (**self).add_error_choice(error)
    }
}
// Folds the `error` fields of the given `Tracked` values into a single merged
// error (right to left).
macro_rules! merge {
    ($head: ident) => {
        $head.error
    };
    ($head: ident $($tail: ident)+) => {
        $head.error.merge(merge!($($tail)+))
    };
}

// Recursively tries each alternative of a tuple choice in order, resetting the
// input to `$before` between attempts. The identifiers trailing the
// parenthesized list accumulate (parser, error) pairs for the alternatives
// already tried; once the list is empty (first rule) the accumulated errors
// are merged into the final `PeekErr`.
macro_rules! do_choice {
    (
        $input: ident
        $before_position: ident
        $before: ident
        $partial_state: ident
        $state: ident
        ( )
        $($parser: ident $error: ident)+
    ) => { {
        let mut error = Tracked::from(merge!($($error)+));
        // If offset != 1 then the nested parser is a sequence of parsers where 1 or
        // more parsers returned `PeekOk` before the parser finally failed with
        // `PeekErr`. Since we lose the offsets of the nested parsers when we merge
        // the errors we must first extract the errors before we do the merge.
        // If the offset == 0 on the other hand (which should be the common case) then
        // we can delay the addition of the error since we know for certain that only
        // the first parser in the sequence were tried
        $(
            if $error.offset != ErrorOffset(1) {
                error.offset = $error.offset;
                $parser.add_error(&mut error);
                error.offset = ErrorOffset(0);
            }
        )+
        PeekErr(error)
    } };
    (
        $input: ident
        $before_position: ident
        $before: ident
        $partial_state: ident
        $state: ident
        ( $head: ident $($tail: ident)* )
        $($all: ident)*
    ) => { {
        let parser = $head;
        let mut state = $head::PartialState::default();
        match parser.parse_mode(crate::parser::FirstMode, $input, &mut state) {
            CommitOk(x) => CommitOk(x),
            PeekOk(x) => PeekOk(x),
            CommitErr(err) => {
                // If we get `CommitErr` but the input is the same this is a partial parse we
                // cannot commit to so leave the state as `Peek` to retry all the parsers
                // on the next call to `parse_partial`
                if $input.position() != $before_position {
                    *$state = self::$partial_state::$head(state);
                }
                CommitErr(err)
            }
            PeekErr($head) => {
                // Nothing was consumed; rewind and try the next alternative.
                ctry!($input.reset($before.clone()).committed());
                do_choice!(
                    $input
                    $before_position
                    $before
                    $partial_state
                    $state
                    ( $($tail)* )
                    $($all)*
                    parser
                    $head
                )
            }
        }
    } }
}
// Generates `ChoiceParser` impls for tuples of every arity, peeling one
// identifier off the list per recursion step.
macro_rules! tuple_choice_parser {
    ($head: ident) => {
        tuple_choice_parser_inner!($head; $head);
    };
    ($head: ident $($id: ident)+) => {
        tuple_choice_parser_inner!($head; $head $($id)+);
        tuple_choice_parser!($($id)+);
    };
}

// Generates a single `ChoiceParser` impl for the tuple `($($id,)+)` together
// with the partial-state enum `$partial_state`, which records which
// alternative (if any) was mid-parse when a partial parse was suspended.
macro_rules! tuple_choice_parser_inner {
    ($partial_state: ident; $($id: ident)+) => {
        #[doc(hidden)]
        pub enum $partial_state<$($id),+> {
            // No alternative has committed yet; every parser may be (re)tried.
            Peek,
            $(
                $id($id),
            )+
        }

        impl<$($id),+> Default for self::$partial_state<$($id),+> {
            fn default() -> Self {
                self::$partial_state::Peek
            }
        }

        #[allow(non_snake_case)]
        impl<Input, Output $(,$id)+> ChoiceParser<Input> for ($($id,)+)
        where
            Input: Stream,
            $($id: Parser< Input, Output = Output>),+
        {
            type Output = Output;
            type PartialState = self::$partial_state<$($id::PartialState),+>;

            parse_mode_choice!(Input);

            #[inline]
            fn parse_mode_choice<Mode>(
                &mut self,
                mode: Mode,
                input: &mut Input,
                state: &mut Self::PartialState,
            ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
            where
                Mode: ParseMode,
            {
                let ($(ref mut $id,)+) = *self;
                let empty = match *state {
                    self::$partial_state::Peek => true,
                    _ => false,
                };
                if mode.is_first() || empty {
                    // Fresh parse: try each alternative from the start.
                    let before_position = input.position();
                    let before = input.checkpoint();
                    do_choice!(input before_position before $partial_state state ( $($id)+ ) )
                } else {
                    // Partial parse: resume only the alternative recorded in
                    // `state`, then reset to `Peek` on success.
                    match *state {
                        self::$partial_state::Peek => unreachable!(),
                        $(
                            self::$partial_state::$id(_) => {
                                let result = match *state {
                                    self::$partial_state::$id(ref mut state) => {
                                        $id.parse_mode(mode, input, state)
                                    }
                                    _ => unreachable!()
                                };
                                if result.is_ok() {
                                    *state = self::$partial_state::Peek;
                                }
                                result
                            }
                        )+
                    }
                }
            }

            fn add_error_choice(
                &mut self,
                error: &mut Tracked<<Input as StreamOnce>::Error>
            ) {
                if error.offset != ErrorOffset(0) {
                    let ($(ref mut $id,)+) = *self;
                    // Reset the offset to 1 on every add so that we always (and only) takes the
                    // error of the first parser. If we don't do this the first parser will consume
                    // the offset to the detriment for all the other parsers.
                    $(
                        error.offset = ErrorOffset(1);
                        $id.add_error(error);
                    )+
                }
            }
        }
    }
}

// One impl per tuple arity, up to the number of identifiers listed here.
tuple_choice_parser!(A B C D E F G H I J K L M N O P Q R S T U V X Y Z);
// Implements `ChoiceParser` for fixed-size arrays of the given lengths by
// delegating everything to the `[P]` slice implementation.
macro_rules! array_choice_parser {
    ($($t: tt)+) => {
        $(
        impl<Input, P> ChoiceParser<Input> for [P; $t]
        where
            Input: Stream,
            P: Parser<Input>,
        {
            type Output = P::Output;
            type PartialState = <[P] as ChoiceParser<Input>>::PartialState;

            parse_mode_choice!(Input);

            #[inline]
            fn parse_mode_choice<M>(
                &mut self,
                mode: M,
                input: &mut Input,
                state: &mut Self::PartialState,
            ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
            where
                M: ParseMode,
            {
                if mode.is_first() {
                    self[..].parse_first(input, state)
                } else {
                    self[..].parse_partial(input, state)
                }
            }

            fn add_error_choice(
                &mut self,
                error: &mut Tracked<<Input as StreamOnce>::Error>
            ) {
                self[..].add_error_choice(error)
            }
        }
        )+
    };
}

#[rustfmt::skip]
array_choice_parser!(
    0 1 2 3 4 5 6 7 8 9
    10 11 12 13 14 15 16 17 18 19
    20 21 22 23 24 25 26 27 28 29
    30 31 32
);
/// Parser returned by [`choice`]. Wraps a tuple, array or slice of parsers
/// and tries each alternative in order.
#[derive(Copy, Clone)]
pub struct Choice<P>(P);

impl<Input, P> Parser<Input> for Choice<P>
where
    Input: Stream,
    P: ChoiceParser<Input>,
{
    type Output = P::Output;
    type PartialState = P::PartialState;

    parse_mode!(Input);

    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        self.0.parse_mode_impl_delegate(mode, input, state) // NOTE(review): placeholder? see below
    }

    fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Collect the alternatives' errors, then consume one offset step for
        // the choice node itself.
        let before = error.offset.0;
        self.0.add_error_choice(error);
        error.offset.0 = before.saturating_sub(1);
    }
}
// Shared implementation of `parse_first`/`parse_partial` for `[P]` (and, via
// delegation, `[P; N]`). `state` is `(index, child_state)`: a non-zero index
// records which parser (1-based) was suspended when a partial parse stopped.
fn slice_parse_mode<Input, P, M>(
    self_: &mut [P],
    mode: M,
    input: &mut Input,
    state: &mut (usize, P::PartialState),
) -> ParseResult<P::Output, <Input as StreamOnce>::Error>
where
    P: Parser<Input>,
    Input: Stream,
    M: ParseMode,
{
    let mut prev_err = None;
    let mut last_parser_having_non_1_offset = 0;
    let before = input.checkpoint();

    let (ref mut index_state, ref mut child_state) = *state;
    // Resume a suspended alternative directly instead of retrying from the
    // first parser.
    if !mode.is_first() && *index_state != 0 {
        return self_[*index_state - 1]
            .parse_partial(input, child_state)
            .map(|x| {
                *index_state = 0;
                x
            });
    }

    for i in 0..self_.len() {
        // Each alternative starts from the same input position.
        ctry!(input.reset(before.clone()).committed());

        match self_[i].parse_mode(mode, input, child_state) {
            committed_err @ CommitErr(_) => {
                // Input was consumed: remember which parser to resume and
                // propagate the committed error.
                *index_state = i + 1;
                return committed_err;
            }
            PeekErr(err) => {
                prev_err = match prev_err {
                    None => Some(err),
                    Some(mut prev_err) => {
                        if prev_err.offset != ErrorOffset(1) {
                            // First add the errors of all the preceding parsers which did not
                            // have a sequence of parsers returning `PeekOk` before failing
                            // with `PeekErr`.
                            let offset = prev_err.offset;
                            for p in &mut self_[last_parser_having_non_1_offset..(i - 1)] {
                                prev_err.offset = ErrorOffset(1);
                                p.add_error(&mut prev_err);
                            }
                            // Then add the errors of the current parser
                            prev_err.offset = offset;
                            self_[i - 1].add_error(&mut prev_err);
                            last_parser_having_non_1_offset = i;
                        }
                        Some(Tracked {
                            error: prev_err.error.merge(err.error),
                            offset: err.offset,
                        })
                    }
                };
            }
            ok @ CommitOk(_) | ok @ PeekOk(_) => {
                *index_state = 0;
                return ok;
            }
        }
    }

    // Every alternative failed without committing: merge all their errors.
    PeekErr(match prev_err {
        None => Input::Error::from_error(
            input.position(),
            StreamError::message_static_message("parser choice is empty"),
        )
        .into(),
        Some(mut prev_err) => {
            if prev_err.offset != ErrorOffset(1) {
                let offset = prev_err.offset;
                let len = self_.len();
                for p in &mut self_[last_parser_having_non_1_offset..(len - 1)] {
                    prev_err.offset = ErrorOffset(1);
                    p.add_error(&mut prev_err);
                }
                prev_err.offset = offset;
                self_.last_mut().unwrap().add_error(&mut prev_err);
                prev_err.offset = ErrorOffset(0);
            }
            prev_err
        }
    })
}
impl<Input, O, P> ChoiceParser<Input> for [P]
where
    Input: Stream,
    P: Parser<Input, Output = O>,
{
    type Output = O;
    // `(index, child_state)`: a non-zero index marks the suspended parser
    // (1-based) for partial-parse resumption.
    type PartialState = (usize, P::PartialState);

    #[inline]
    fn parse_partial(
        &mut self,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        slice_parse_mode(self, crate::parser::PartialMode::default(), input, state)
    }

    #[inline]
    fn parse_first(
        &mut self,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        slice_parse_mode(self, crate::parser::FirstMode, input, state)
    }

    #[inline]
    fn parse_mode_choice<M>(
        &mut self,
        _mode: M,
        _input: &mut Input,
        _state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        // Never called: `parse_first`/`parse_partial` are implemented directly
        // above via `slice_parse_mode` instead of this dispatch helper.
        unreachable!()
    }

    fn add_error_choice(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        if error.offset != ErrorOffset(0) {
            for p in self {
                // Reset to 1 per alternative so each contributes exactly its
                // own first error.
                error.offset = ErrorOffset(1);
                p.add_error(error);
            }
        }
    }
}
/// Takes a tuple, a slice or an array of parsers and tries to apply them each in order.
/// Fails if all the parsers fails or if an applied parser consumes input before failing.
///
/// See [`ChoiceParser`] for the set of accepted collection types.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::char::{digit, string};
/// # fn main() {
/// // `choice` is overloaded on tuples so that different types of parsers can be used
/// // (each parser must still have the same input and output types)
/// let mut parser = choice((
///     string("Apple").map(|s| s.to_string()),
///     many1(digit()),
///     string("Orange").map(|s| s.to_string()),
/// ));
/// assert_eq!(parser.parse("1234"), Ok(("1234".to_string(), "")));
/// assert_eq!(parser.parse("Orangexx"), Ok(("Orange".to_string(), "xx")));
/// assert!(parser.parse("Appl").is_err());
/// assert!(parser.parse("Pear").is_err());
///
/// // If arrays or slices are used then all parsers must have the same type
/// // (`string` in this case)
/// let mut parser2 = choice([string("one"), string("two"), string("three")]);
/// // Fails as the parser for "two" consumes the first 't' before failing
/// assert!(parser2.parse("three").is_err());
///
/// // Use 'attempt' to make failing parsers always act as if they have not committed any input
/// let mut parser3 = choice([attempt(string("one")), attempt(string("two")), attempt(string("three"))]);
/// assert_eq!(parser3.parse("three"), Ok(("three", "")));
/// # }
/// ```
pub fn choice<Input, P>(ps: P) -> Choice<P>
where
    Input: Stream,
    P: ChoiceParser<Input>,
{
    Choice(ps)
}
/// Parser returned by [`or`] / [`Parser::or`]; a two-way choice implemented on
/// top of [`Choice`] over a pair.
#[derive(Copy, Clone)]
pub struct Or<P1, P2>(Choice<(P1, P2)>);

impl<Input, O, P1, P2> Parser<Input> for Or<P1, P2>
where
    Input: Stream,
    P1: Parser<Input, Output = O>,
    P2: Parser<Input, Output = O>,
{
    type Output = O;
    type PartialState = <Choice<(P1, P2)> as Parser<Input>>::PartialState;

    parse_mode!(Input);

    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        self.0.parse_mode(mode, input, state)
    }

    #[inline]
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Only contribute errors while there is error-offset budget left.
        if errors.offset != ErrorOffset(0) {
            self.0.add_error(errors);
        }
    }
}
/// Equivalent to [`p1.or(p2)`].
///
/// If you are looking to chain 3 or more parsers using `or` you may consider using the
/// [`choice!`] macro instead, which can be clearer and may result in a faster parser.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::choice::or;
/// # use combine::parser::char::{digit, string};
/// # fn main() {
/// let mut parser = or(
///     string("let"),
///     or(digit().map(|_| "digit"), string("led")),
/// );
/// assert_eq!(parser.parse("let"), Ok(("let", "")));
/// assert_eq!(parser.parse("1"), Ok(("digit", "")));
/// assert!(parser.parse("led").is_err());
///
/// let mut parser2 = or(string("two"), string("three"));
/// // Fails as the parser for "two" consumes the first 't' before failing
/// assert!(parser2.parse("three").is_err());
///
/// // Use 'attempt' to make failing parsers always act as if they have not committed any input
/// let mut parser3 = or(attempt(string("two")), attempt(string("three")));
/// assert_eq!(parser3.parse("three"), Ok(("three", "")));
/// # }
/// ```
///
/// [`choice!`]: ../../macro.choice.html
/// [`p1.or(p2)`]: ../trait.Parser.html#method.or
pub fn or<Input, P1, P2>(p1: P1, p2: P2) -> Or<P1, P2>
where
    Input: Stream,
    P1: Parser<Input>,
    P2: Parser<Input, Output = P1::Output>,
{
    // Delegates to the tuple-based `choice` implementation.
    Or(choice((p1, p2)))
}
/// Parser returned by [`optional`]; wraps `P` and converts its peek-failures
/// into a successful `None`.
#[derive(Copy, Clone)]
pub struct Optional<P>(P);

impl<Input, P> Parser<Input> for Optional<P>
where
    Input: Stream,
    P: Parser<Input>,
{
    type Output = Option<P::Output>;
    type PartialState = P::PartialState;

    parse_mode!(Input);

    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        let before = input.checkpoint();
        match self.0.parse_mode(mode, input, state) {
            PeekOk(x) => PeekOk(Some(x)),
            CommitOk(x) => CommitOk(Some(x)),
            // A committed failure consumed input, so it still propagates.
            CommitErr(err) => CommitErr(err),
            // A peek failure consumed nothing: rewind and succeed with `None`.
            PeekErr(_) => {
                ctry!(input.reset(before).committed());
                PeekOk(None)
            }
        }
    }

    forward_parser!(Input, add_error parser_count, 0);
}
/// Parses `parser` and outputs `Some(value)` if it succeeds, `None` if it fails without
/// consuming any input. Fails if `parser` fails after having committed some input.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::char::string;
/// # fn main() {
/// let mut parser = optional(string("hello"));
/// assert_eq!(parser.parse("hello"), Ok((Some("hello"), "")));
/// assert_eq!(parser.parse("world"), Ok((None, "world")));
/// assert!(parser.parse("heya").is_err());
/// # }
/// ```
pub fn optional<Input, P>(parser: P) -> Optional<P>
where
    Input: Stream,
    P: Parser<Input>,
{
    Optional(parser)
}
// Expands to `parse_partial`/`parse_first` method bodies which forward to a
// `parse_mode_dispatch` method with the matching `ParseMode` value.
#[macro_export]
#[doc(hidden)]
macro_rules! parse_mode_dispatch {
    () => {
        fn parse_partial(
            &mut self,
            input: &mut Input,
            state: &mut Self::PartialState,
        ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
            self.parse_mode_dispatch($crate::parser::PartialMode::default(), input, state)
        }

        fn parse_first(
            &mut self,
            input: &mut Input,
            state: &mut Self::PartialState,
        ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
            self.parse_mode_dispatch($crate::parser::FirstMode, input, state)
        }
    };
}
// Generates the `Dispatch` enum used by `dispatch!`: one variant per branch
// expression, each holding that branch's parser type, plus a `Parser` impl
// that forwards to whichever variant is active. The first two rules count the
// `$expr`s by consuming one pre-supplied identifier per expression, collecting
// exactly as many variant names as there are branches.
#[macro_export]
#[doc(hidden)]
macro_rules! dispatch_parser_impl {
    ($parser_name: ident [$first_ident: ident $($id: ident)*] [$($collected_idents: ident)*] $expr: expr, $($rest: expr,)*) => {
        $crate::dispatch_parser_impl!{ $parser_name [ $($id)* ] [$($collected_idents)* $first_ident] $($rest,)*}
    };
    ($parser_name: ident [$($id: ident)*] [$($collected_idents: ident)*]) => {
        $crate::dispatch_parser_impl!{ $parser_name; $($collected_idents)* }
    };

    ($parser_name: ident; $($id: ident)*) => {
        pub enum $parser_name<$($id),*> {
            $(
                $id($id),
            )*
        }

        #[allow(non_snake_case)]
        impl<Input, Output, $($id),*> $crate::Parser<Input> for $parser_name<$($id),*>
        where
            $( $id: $crate::Parser<Input, Output = Output>, )*
            Input: $crate::Stream,
        {
            type Output = Output;
            // `Option` wrapper: `None` until a branch starts, then that
            // branch's partial state.
            type PartialState = Option<$parser_name<$($id::PartialState),*>>;

            $crate::parse_mode!(Input);

            fn parse_mode<Mode>(
                &mut self,
                mode: Mode,
                input: &mut Input,
                state: &mut Self::PartialState,
            ) -> $crate::error::ParseResult<Self::Output, <Input as $crate::StreamOnce>::Error>
            where
                Mode: $crate::parser::ParseMode,
            {
                match self {
                    $(
                        $parser_name::$id($id) => {
                            // Reuse the stored state only if it belongs to this
                            // variant, otherwise start it fresh.
                            let state = match state {
                                Some($parser_name::$id(s)) => s,
                                _ => {
                                    *state = Some($parser_name::$id(Default::default()));
                                    match state {
                                        Some($parser_name::$id(s)) => s,
                                        _ => unreachable!(),
                                    }
                                }
                            };
                            $id.parse_mode(mode, input, state)
                        }
                    )*
                }
            }

            fn add_error(&mut self, error: &mut $crate::error::Tracked<<Input as $crate::StreamOnce>::Error>) {
                match self {
                    $(
                        $parser_name::$id($id) => $id.add_error(error),
                    )*
                }
            }
        }
    }
}
// Builds the `match` expression that selects the branch's parser and wraps it
// in the corresponding `Dispatch` enum variant. The first two rules pair each
// match arm with one of the pre-supplied identifiers (mirroring
// `dispatch_parser_impl!` so variant names line up).
#[macro_export]
#[doc(hidden)]
macro_rules! dispatch_inner {
    ($expr_ident: ident [$first_ident: ident $($id: ident)*] [$($collected: tt)*] $($pat: pat)|+ $(if $pred:expr)? => $expr: expr, $($rest_alt: tt)*) => {
        $crate::dispatch_inner!{ $expr_ident [ $($id)* ] [$($collected)* $first_ident $($pat)|+ $(if $pred)? => $expr,] $($rest_alt)*}
    };
    ($expr_ident: ident [$($id: ident)*] [$($collected: tt)*]) => {
        $crate::dispatch_inner!{ $expr_ident $($collected)* }
    };
    ($expr_ident: ident [$($ident_tt: tt)*]) => {
        unreachable!()
    };
    ($expr_ident: ident $( $ident: ident $($pat: pat)|+ $(if $pred:expr)? => $expr: expr,)+ ) => {
        match $expr_ident {
            $(
                $($pat)|+ $(if $pred)? => Dispatch::$ident(check_parser($expr)),
            )+
        }
    }
}
/// `dispatch!` allows a parser to be constructed depending on earlier input, without forcing each
/// branch to have the same type of parser
///
/// ```
/// use combine::{dispatch, any, token, satisfy, EasyParser, Parser};
///
/// let mut parser = any().then(|e| {
///     dispatch!(e;
///         'a' => token('a'),
///         'b' => satisfy(|b| b == 'b'),
///         t if t == 'c' => any(),
///         _ => token('d')
///     )
/// });
/// assert_eq!(parser.easy_parse("aa"), Ok(('a', "")));
/// assert_eq!(parser.easy_parse("cc"), Ok(('c', "")));
/// assert_eq!(parser.easy_parse("cd"), Ok(('d', "")));
/// assert!(parser.easy_parse("ab").is_err());
/// ```
#[macro_export]
macro_rules! dispatch {
    ($match_expr: expr; $( $($pat: pat)|+ $(if $pred:expr)? => $expr: expr ),+ $(,)? ) => {
        {
            // Generate a `Dispatch` enum with one variant per branch...
            $crate::dispatch_parser_impl!{ Dispatch [A B C D E F G H I J K L M N O P Q R S T U V X Y Z] [] $($expr,)+ }
            // ...and a helper that pins each branch expression to be a parser.
            fn check_parser<Input, P>(p: P) -> P where P: $crate::Parser<Input>, Input: $crate::Stream { p }
            let e = $match_expr;
            // Select the branch matching `e` and wrap its parser in the enum.
            let parser = $crate::dispatch_inner!(e [A B C D E F G H I J K L M N O P Q R S T U V X Y Z] []
                $(
                    $($pat)|+ $(if $pred)? => $expr,
                )*
            );
            parser
        }
    }
}
#[cfg(all(feature = "std", test))]
mod tests {

    use crate::parser::{token::any, EasyParser};

    use super::*;

    // `choice` must also accept a one-element tuple.
    #[test]
    fn choice_single_parser() {
        let mut single = choice((any(),));
        assert!(single.easy_parse("a").is_ok());
    }
}

1558
vendor/combine/src/parser/combinator.rs vendored Normal file

File diff suppressed because it is too large Load Diff

245
vendor/combine/src/parser/error.rs vendored Normal file
View File

@@ -0,0 +1,245 @@
//! Parsers which cause errors or modifies the returned error on parse failure.
use crate::{
error::{
ErrorInfo, ParseError,
ParseResult::{self, *},
StreamError, Tracked,
},
lib::marker::PhantomData,
parser::ParseMode,
Parser, Stream, StreamOnce,
};
/// Parser returned by [`unexpected`] and [`unexpected_any`]: always fails
/// without consuming input, reporting `E` as an "unexpected" error.
#[derive(Clone)]
pub struct Unexpected<I, T, E>(E, PhantomData<fn(I) -> (I, T)>)
where
    I: Stream;

impl<Input, T, E> Parser<Input> for Unexpected<Input, T, E>
where
    Input: Stream,
    E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
    type Output = T;
    type PartialState = ();

    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, <Input as StreamOnce>::Error> {
        // Fail with an empty error at the current position; the concrete
        // message is attached lazily through `add_error` below.
        PeekErr(<Input as StreamOnce>::Error::empty(input.position()).into())
    }

    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        errors.error.add(StreamError::unexpected(&self.0));
    }
}
/// Always fails with `message` as an unexpected error.
/// Never consumes any input.
///
/// Has `()` the output type
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::error::StreamError;
/// # fn main() {
/// let result = unexpected("token")
/// .easy_parse("a");
/// assert!(result.is_err());
/// assert!(
/// result.err()
/// .unwrap()
/// .errors
/// .iter()
/// .any(|m| *m == StreamError::unexpected("token"))
/// );
/// # }
/// ```
pub fn unexpected<Input, S>(message: S) -> Unexpected<Input, (), S>
where
Input: Stream,
S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
unexpected_any(message)
}
/// Always fails with `message` as an unexpected error.
/// Never consumes any input.
///
/// May have anything as the output type but must be used such that the output type can inferred.
/// The `unexpected` parser can be used if the output type does not matter
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::parser::error::unexpected_any;
/// # use combine::error::StreamError;
/// # fn main() {
/// let result = token('b').or(unexpected_any("token"))
///     .easy_parse("a");
/// assert!(result.is_err());
/// assert!(
///     result.err()
///         .unwrap()
///         .errors
///         .iter()
///         .any(|m| *m == StreamError::unexpected("token"))
/// );
/// # }
/// ```
pub fn unexpected_any<Input, S, T>(message: S) -> Unexpected<Input, T, S>
where
    Input: Stream,
    S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
    // `T` is left free so the surrounding combinator (e.g. `or`) fixes it.
    Unexpected(message, PhantomData)
}
/// Parser which runs the wrapped parser `P` and, on failure, appends the
/// message `S` to the produced error. Constructed by [`message`].
#[derive(Clone)]
pub struct Message<P, S>(P, S);

impl<Input, P, S> Parser<Input> for Message<P, S>
where
    Input: Stream,
    P: Parser<Input>,
    S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
    type Output = P::Output;
    type PartialState = P::PartialState;

    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        match self.0.parse_mode(mode, input, state) {
            // Successes pass through untouched.
            CommitOk(x) => CommitOk(x),
            PeekOk(x) => PeekOk(x),
            // The message should always be added even if some input was committed before failing
            CommitErr(mut err) => {
                err.add_message(&self.1);
                CommitErr(err)
            }
            // The message will be added in `add_error`
            PeekErr(err) => PeekErr(err),
        }
    }

    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Collect the inner parser's errors first, then append our message.
        self.0.add_error(errors);
        errors.error.add_message(&self.1);
    }

    // Everything else is forwarded verbatim to the wrapped parser.
    forward_parser!(Input, parser_count add_committed_expected_error, 0);
}

/// Equivalent to [`p1.message(msg)`].
///
/// [`p1.message(msg)`]: ../trait.Parser.html#method.message
pub fn message<Input, P, S>(p: P, msg: S) -> Message<P, S>
where
    P: Parser<Input>,
    Input: Stream,
    S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
    Message(p, msg)
}
/// Parser which runs the wrapped parser `P` but reports `S` as the "expected"
/// information when it fails without committing. Constructed by [`expected`].
#[derive(Clone)]
pub struct Expected<P, S>(P, S);

impl<Input, P, S> Parser<Input> for Expected<P, S>
where
    P: Parser<Input>,
    Input: Stream,
    S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
    type Output = P::Output;
    type PartialState = P::PartialState;

    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        // Parsing itself is unchanged; only error reporting differs.
        self.0.parse_mode(mode, input, state)
    }

    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Gather the inner parser's errors, then substitute our own
        // "expected" entry via `ParseError::set_expected`.
        ParseError::set_expected(errors, StreamError::expected(&self.1), |errors| {
            self.0.add_error(errors);
        })
    }

    forward_parser!(Input, parser_count add_committed_expected_error, 0);
}

/// Equivalent to [`p.expected(info)`].
///
/// [`p.expected(info)`]: ../trait.Parser.html#method.expected
pub fn expected<Input, P, S>(p: P, info: S) -> Expected<P, S>
where
    P: Parser<Input>,
    Input: Stream,
    S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
{
    Expected(p, info)
}
/// Parser which runs the wrapped parser `P` but strips all "expected"
/// information from any error it produces. Constructed by [`silent`].
#[derive(Clone)]
pub struct Silent<P>(P);

impl<Input, P> Parser<Input> for Silent<P>
where
    P: Parser<Input>,
    Input: Stream,
{
    type Output = P::Output;
    type PartialState = P::PartialState;

    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        // Drop the expected-token info from committed errors as well.
        self.0.parse_mode(mode, input, state).map_err(|mut err| {
            err.clear_expected();
            err
        })
    }

    // Intentionally empty: suppress the inner parser's error contributions.
    fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {}

    fn add_committed_expected_error(
        &mut self,
        _errors: &mut Tracked<<Input as StreamOnce>::Error>,
    ) {
    }

    forward_parser!(Input, parser_count, 0);
}

/// Equivalent to [`p.silent()`].
///
/// [`p.silent()`]: ../trait.Parser.html#method.silent
pub fn silent<Input, P>(p: P) -> Silent<P>
where
    P: Parser<Input>,
    Input: Stream,
{
    Silent(p)
}

178
vendor/combine/src/parser/function.rs vendored Normal file
View File

@@ -0,0 +1,178 @@
//! Parsers constructor from regular functions
use crate::{
error::{ParseResult, StdParseResult},
lib::marker::PhantomData,
stream::Stream,
Parser,
};
// Lets a boxed/borrowed closure trait object be used directly as a parser:
// the closure is simply invoked on the stream and its `StdParseResult`
// converted into the internal `ParseResult` representation.
impl<'a, Input: Stream, O> Parser<Input>
    for dyn FnMut(&mut Input) -> StdParseResult<O, Input> + 'a
{
    type Output = O;
    type PartialState = ();

    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
        self(input).into()
    }
}
/// Parser wrapper around a plain function or closure; created by [`parser`].
/// The `PhantomData` only ties the `Input` type parameter into the type.
#[derive(Copy, Clone)]
pub struct FnParser<Input, F>(F, PhantomData<fn(Input) -> Input>);

/// Wraps a function, turning it into a parser.
///
/// Mainly needed to turn closures into parsers as function types can be casted to function pointers
/// to make them usable as a parser.
///
/// ```
/// extern crate combine;
/// # use combine::*;
/// # use combine::parser::char::digit;
/// # use combine::error::{Commit, StreamError};
/// # use combine::stream::easy;
/// # fn main() {
/// let mut even_digit = parser(|input| {
///     // Help type inference out
///     let _: &mut easy::Stream<&str> = input;
///     let position = input.position();
///     let (char_digit, committed) = digit().parse_stream(input).into_result()?;
///     let d = (char_digit as i32) - ('0' as i32);
///     if d % 2 == 0 {
///         Ok((d, committed))
///     }
///     else {
///         //Return an empty error since we only tested the first token of the stream
///         let errors = easy::Errors::new(
///             position,
///             StreamError::expected("even number")
///         );
///         Err(Commit::Peek(errors.into()))
///     }
/// });
/// let result = even_digit
///     .easy_parse("8")
///     .map(|x| x.0);
/// assert_eq!(result, Ok(8));
/// # }
/// ```
pub fn parser<Input, O, F>(f: F) -> FnParser<Input, F>
where
    Input: Stream,
    F: FnMut(&mut Input) -> StdParseResult<O, Input>,
{
    FnParser(f, PhantomData)
}

impl<Input, O, F> Parser<Input> for FnParser<Input, F>
where
    Input: Stream,
    F: FnMut(&mut Input) -> StdParseResult<O, Input>,
{
    type Output = O;
    type PartialState = ();

    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
        // Call the wrapped function and convert its `StdParseResult`.
        (self.0)(input).into()
    }
}
// Bare function pointers of the right signature are parsers too, without
// needing the `FnParser` wrapper (useful for named top-level parse functions).
impl<Input, O> Parser<Input> for fn(&mut Input) -> StdParseResult<O, Input>
where
    Input: Stream,
{
    type Output = O;
    type PartialState = ();

    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
        self(input).into()
    }
}
/// Parser pairing a function pointer with an environment value that is cloned
/// and passed to the function on every parse attempt; created by [`env_parser`].
#[derive(Copy)]
pub struct EnvParser<E, Input, T>
where
    Input: Stream,
{
    env: E,
    parser: fn(E, &mut Input) -> StdParseResult<T, Input>,
}

// Manual `Clone` impl: a derive would also require `Input: Clone` since the
// derive bounds every type parameter, but `Input` is never stored.
impl<E, Input, T> Clone for EnvParser<E, Input, T>
where
    Input: Stream,
    E: Clone,
{
    fn clone(&self) -> Self {
        EnvParser {
            env: self.env.clone(),
            parser: self.parser,
        }
    }
}

impl<Input, E, O> Parser<Input> for EnvParser<E, Input, O>
where
    E: Clone,
    Input: Stream,
{
    type Output = O;
    type PartialState = ();

    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
        // The environment is cloned per call so the `fn` can take it by value.
        (self.parser)(self.env.clone(), input).into()
    }
}

/// Constructs a parser out of an environment and a function which needs the given environment to
/// do the parsing. This is commonly useful to allow multiple parsers to share some environment
/// while still allowing the parsers to be written in separate functions.
///
/// ```
/// # extern crate combine;
/// # use std::collections::HashMap;
/// # use combine::*;
/// # use combine::parser::function::env_parser;
/// # use combine::parser::char::letter;
/// # fn main() {
/// struct Interner(HashMap<String, u32>);
/// impl Interner {
///     fn string<Input>(&self, input: &mut Input) -> StdParseResult<u32, Input>
///         where Input: Stream<Token = char>,
///     {
///         many(letter())
///             .map(|s: String| self.0.get(&s).cloned().unwrap_or(0))
///             .parse_stream(input)
///             .into_result()
///     }
/// }
///
/// let mut map = HashMap::new();
/// map.insert("hello".into(), 1);
/// map.insert("test".into(), 2);
///
/// let env = Interner(map);
/// let mut parser = env_parser(&env, Interner::string);
///
/// let result = parser.parse("hello");
/// assert_eq!(result, Ok((1, "")));
///
/// let result = parser.parse("world");
/// assert_eq!(result, Ok((0, "")));
/// # }
/// ```
pub fn env_parser<E, Input, O>(
    env: E,
    parser: fn(E, &mut Input) -> StdParseResult<O, Input>,
) -> EnvParser<E, Input, O>
where
    E: Clone,
    Input: Stream,
{
    EnvParser { env, parser }
}

1207
vendor/combine/src/parser/mod.rs vendored Normal file

File diff suppressed because it is too large Load Diff

767
vendor/combine/src/parser/range.rs vendored Normal file
View File

@@ -0,0 +1,767 @@
//! Module containing zero-copy parsers.
//!
//! These parsers require the [`RangeStream`][] bound instead of a plain [`Stream`][].
//!
//! [`RangeStream`]: ../../stream/trait.RangeStream.html
//! [`Stream`]: ../../stream/trait.Stream.html
use crate::{
error::{
self, ParseError,
ParseResult::{self, *},
ResultExt, StreamError, Tracked,
},
lib::{convert::TryFrom, marker::PhantomData},
parser::ParseMode,
};
#[cfg(feature = "std")]
use crate::lib::error::Error as StdError;
#[cfg(not(feature = "std"))]
use crate::lib::fmt;
use crate::stream::{
uncons_range, uncons_while, uncons_while1, wrap_stream_error, Range as StreamRange,
RangeStream, StreamErrorFor, StreamOnce,
};
use crate::Parser;
/// Zero-copy parser matching the exact range stored inside; created by [`range`].
pub struct Range<Input>(Input::Range)
where
    Input: RangeStream;

impl<Input> Parser<Input> for Range<Input>
where
    Input: RangeStream,
    Input::Range: PartialEq + crate::stream::Range,
{
    type Output = Input::Range;
    type PartialState = ();

    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        use crate::stream::Range;

        // Remember where we started so a mismatch reports the right position.
        let position = input.position();
        match input.uncons_range(self.0.len()) {
            Ok(other) => {
                if other == self.0 {
                    CommitOk(other)
                } else {
                    // Wrong contents: report as a peek error at the start
                    // position (the expected range is added in `add_error`).
                    PeekErr(Input::Error::empty(position).into())
                }
            }
            // Not enough input (or another stream error) — wrap it.
            Err(err) => wrap_stream_error(input, err),
        }
    }

    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // TODO Add unexpected message?
        errors.error.add_expected(error::Range(self.0.clone()));
    }
}
parser! {
    #[derive(Clone)]
    pub struct Recognize;
    // Delegates state handling to `RecognizeWithValue`, of which this is a
    // thin projection.
    type PartialState = <RecognizeWithValue<P> as Parser<Input>>::PartialState;

    /// Zero-copy parser which returns committed input range.
    ///
    /// [`combinator::recognize`][] is a non-`RangeStream` alternative.
    ///
    /// [`combinator::recognize`]: ../../parser/combinator/fn.recognize.html
    /// ```
    /// # extern crate combine;
    /// # use combine::parser::range::recognize;
    /// # use combine::parser::char::letter;
    /// # use combine::*;
    /// # fn main() {
    /// let mut parser = recognize(skip_many1(letter()));
    /// assert_eq!(parser.parse("hello world"), Ok(("hello", " world")));
    /// assert!(parser.parse("!").is_err());
    /// # }
    /// ```
    pub fn recognize[Input, P](parser: P)(Input) -> <Input as StreamOnce>::Range
    where [
        P: Parser<Input>,
        Input: RangeStream,
        <Input as StreamOnce>::Range: crate::stream::Range,
    ]
    {
        // Run the full recognize-with-value machinery and discard the value.
        recognize_with_value(parser).map(|(range, _)| range)
    }
}
/// Shared driver for the range-consuming parsers (`TakeWhile`, `TakeWhile1`).
///
/// `first` performs the initial attempt and `resume` continues a previously
/// interrupted partial parse. `distance_state` records how much input the
/// interrupted attempt had consumed, so that on resume the already-seen
/// prefix can be replayed before `resume` runs on the new data.
#[inline]
fn parse_partial_range<M, F, G, S, Input>(
    mode: M,
    input: &mut Input,
    distance_state: &mut usize,
    state: S,
    first: F,
    resume: G,
) -> ParseResult<Input::Range, Input::Error>
where
    M: ParseMode,
    F: FnOnce(&mut Input, S) -> ParseResult<Input::Range, <Input as StreamOnce>::Error>,
    G: FnOnce(&mut Input, S) -> ParseResult<Input::Range, <Input as StreamOnce>::Error>,
    Input: RangeStream,
{
    let before = input.checkpoint();

    if !input.is_partial() {
        // Non-partial streams cannot be interrupted mid-parse, so the plain
        // attempt is always enough.
        first(input, state)
    } else if mode.is_first() || *distance_state == 0 {
        let result = first(input, state);
        if let CommitErr(_) = result {
            // Interrupted after consuming some input: remember how far we got
            // and rewind so the next attempt can replay from the checkpoint.
            *distance_state = input.distance(&before);
            ctry!(input.reset(before).committed());
        }
        result
    } else {
        // Resuming: skip the prefix consumed by the earlier attempt.
        if input.uncons_range(*distance_state).is_err() {
            panic!("recognize errored when restoring the input stream to its expected state");
        }

        match resume(input, state) {
            CommitOk(_) | PeekOk(_) => (),
            PeekErr(err) => return PeekErr(err),
            CommitErr(err) => {
                // Interrupted again: update the replay distance and rewind.
                *distance_state = input.distance(&before);
                ctry!(input.reset(before).committed());
                return CommitErr(err);
            }
        }

        // Success: re-read the whole matched span as one contiguous range.
        let distance = input.distance(&before);
        ctry!(input.reset(before).committed());
        take(distance).parse_lazy(input).map(|range| {
            *distance_state = 0;
            range
        })
    }
}
/// Zero-copy parser which yields both the committed range and the inner
/// parser's value; created by [`recognize_with_value`].
#[derive(Clone)]
pub struct RecognizeWithValue<P>(P);

impl<Input, P> Parser<Input> for RecognizeWithValue<P>
where
    P: Parser<Input>,
    Input: RangeStream,
    <Input as StreamOnce>::Range: crate::stream::Range,
{
    type Output = (<Input as StreamOnce>::Range, P::Output);
    // (replay distance for partial resumes, inner parser's state)
    type PartialState = (usize, P::PartialState);

    parse_mode!(Input);
    #[inline]
    fn parse_mode<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        let (ref mut distance_state, ref mut child_state) = *state;

        let before = input.checkpoint();
        // On resume, replay the prefix the interrupted attempt had consumed.
        if !mode.is_first() && input.uncons_range(*distance_state).is_err() {
            panic!("recognize errored when restoring the input stream to its expected state");
        }

        let value = match self.0.parse_mode(mode, input, child_state) {
            CommitOk(x) | PeekOk(x) => x,
            PeekErr(err) => return PeekErr(err),
            CommitErr(err) => {
                // Interrupted mid-parse: record the distance and rewind so a
                // later call can resume from the checkpoint.
                *distance_state = input.distance(&before);
                ctry!(input.reset(before).committed());
                return CommitErr(err);
            }
        };

        // Re-read everything the inner parser consumed as one range.
        let distance = input.distance(&before);
        ctry!(input.reset(before).committed());
        take(distance).parse_lazy(input).map(|range| {
            *distance_state = 0;
            (range, value)
        })
    }

    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        self.0.add_error(errors)
    }
}
/// Zero-copy parser which returns a pair: (committed input range, parsed value).
///
///
/// [`combinator::recognize_with_value`] is a non-`RangeStream` alternative.
///
/// [`combinator::recognize_with_value`]: recognize_with_value
/// ```
/// # extern crate combine;
/// # use combine::parser::range::recognize_with_value;
/// # use combine::parser::char::{digit, char};
/// # use combine::*;
/// # fn main() {
/// let mut parser = recognize_with_value((
///     skip_many1(digit()),
///     optional((attempt(char('.')), skip_many1(digit()))),
/// ).map(|(_, opt)| opt.is_some()));
///
/// assert_eq!(parser.parse("1234!"), Ok((("1234", false), "!")));
/// assert_eq!(parser.parse("1234.0001!"), Ok((("1234.0001", true), "!")));
/// assert!(parser.parse("!").is_err());
/// assert!(parser.parse("1234.").is_err());
/// # }
/// ```
pub fn recognize_with_value<Input, P>(parser: P) -> RecognizeWithValue<P>
where
    P: Parser<Input>,
    Input: RangeStream,
    <Input as StreamOnce>::Range: crate::stream::Range,
{
    RecognizeWithValue(parser)
}
/// Zero-copy parser which reads a range of length `i.len()` and succeeds if `i` is equal to that
/// range.
///
/// [`tokens`] is a non-`RangeStream` alternative.
///
/// [`tokens`]: super::token::tokens
/// ```
/// # extern crate combine;
/// # use combine::parser::range::range;
/// # use combine::*;
/// # fn main() {
/// let mut parser = range("hello");
/// let result = parser.parse("hello world");
/// assert_eq!(result, Ok(("hello", " world")));
/// let result = parser.parse("hel world");
/// assert!(result.is_err());
/// # }
/// ```
pub fn range<Input>(i: Input::Range) -> Range<Input>
where
    Input: RangeStream,
    Input::Range: PartialEq,
{
    Range(i)
}
/// Zero-copy parser consuming exactly `usize` tokens; created by [`take`].
pub struct Take<Input>(usize, PhantomData<fn(Input)>);

impl<Input> Parser<Input> for Take<Input>
where
    Input: RangeStream,
{
    type Output = Input::Range;
    type PartialState = ();

    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        // Delegates entirely to the stream's range extraction.
        uncons_range(input, self.0)
    }
}

/// Zero-copy parser which reads a range of length `n`.
///
/// [`count_min_max`][] is a non-`RangeStream` alternative.
///
/// [`count_min_max`]: ../../parser/repeat/fn.count_min_max.html
/// ```
/// # extern crate combine;
/// # use combine::parser::range::take;
/// # use combine::*;
/// # fn main() {
/// let mut parser = take(1);
/// let result = parser.parse("1");
/// assert_eq!(result, Ok(("1", "")));
/// let mut parser = take(4);
/// let result = parser.parse("123abc");
/// assert_eq!(result, Ok(("123a", "bc")));
/// let result = parser.parse("abc");
/// assert!(result.is_err());
/// # }
/// ```
pub fn take<Input>(n: usize) -> Take<Input>
where
    Input: RangeStream,
{
    Take(n, PhantomData)
}
/// Zero-copy parser consuming tokens while the predicate holds (possibly
/// zero tokens); created by [`take_while`].
pub struct TakeWhile<Input, F>(F, PhantomData<fn(Input) -> Input>);

impl<Input, F> Parser<Input> for TakeWhile<Input, F>
where
    Input: RangeStream,
    Input::Range: crate::stream::Range,
    F: FnMut(Input::Token) -> bool,
{
    type Output = Input::Range;
    // Replay distance used by `parse_partial_range` on partial-stream resumes.
    type PartialState = usize;

    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        // First attempt and resume behave identically: keep consuming while
        // the predicate matches (zero matches is a success).
        parse_partial_range(
            mode,
            input,
            state,
            &mut self.0,
            |input, predicate| uncons_while(input, predicate),
            |input, predicate| uncons_while(input, predicate),
        )
    }
}

/// Zero-copy parser which reads a range of 0 or more tokens which satisfy `f`.
///
/// [`many`][] is a non-`RangeStream` alternative.
///
/// [`many`]: ../../parser/repeat/fn.many.html
/// ```
/// # extern crate combine;
/// # use combine::parser::range::take_while;
/// # use combine::*;
/// # fn main() {
/// let mut parser = take_while(|c: char| c.is_digit(10));
/// let result = parser.parse("123abc");
/// assert_eq!(result, Ok(("123", "abc")));
/// let result = parser.parse("abc");
/// assert_eq!(result, Ok(("", "abc")));
/// # }
/// ```
pub fn take_while<Input, F>(f: F) -> TakeWhile<Input, F>
where
    Input: RangeStream,
    Input::Range: crate::stream::Range,
    F: FnMut(Input::Token) -> bool,
{
    TakeWhile(f, PhantomData)
}
/// Zero-copy parser consuming tokens while the predicate holds, requiring at
/// least one match; created by [`take_while1`].
pub struct TakeWhile1<Input, F>(F, PhantomData<fn(Input) -> Input>);

impl<Input, F> Parser<Input> for TakeWhile1<Input, F>
where
    Input: RangeStream,
    Input::Range: crate::stream::Range,
    F: FnMut(Input::Token) -> bool,
{
    type Output = Input::Range;
    // Replay distance used by `parse_partial_range` on partial-stream resumes.
    type PartialState = usize;

    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        // The first attempt demands at least one matching token
        // (`uncons_while1`); a resume already has matched tokens behind it,
        // so the plain `uncons_while` suffices there.
        parse_partial_range(
            mode,
            input,
            state,
            &mut self.0,
            |input, predicate| uncons_while1(input, predicate),
            |input, predicate| uncons_while(input, predicate),
        )
    }
}

/// Zero-copy parser which reads a range of 1 or more tokens which satisfy `f`.
///
/// [`many1`][] is a non-`RangeStream` alternative.
///
/// [`many1`]: ../../parser/repeat/fn.many1.html
/// ```
/// # extern crate combine;
/// # use combine::parser::range::take_while1;
/// # use combine::*;
/// # fn main() {
/// let mut parser = take_while1(|c: char| c.is_digit(10));
/// let result = parser.parse("123abc");
/// assert_eq!(result, Ok(("123", "abc")));
/// let result = parser.parse("abc");
/// assert!(result.is_err());
/// # }
/// ```
pub fn take_while1<Input, F>(f: F) -> TakeWhile1<Input, F>
where
    Input: RangeStream,
    Input::Range: crate::stream::Range,
    F: FnMut(Input::Token) -> bool,
{
    TakeWhile1(f, PhantomData)
}
/// Zero-copy parser consuming input up to (but not including) the stored
/// range; created by [`take_until_range`].
pub struct TakeUntilRange<Input>(Input::Range)
where
    Input: RangeStream;

impl<Input> Parser<Input> for TakeUntilRange<Input>
where
    Input: RangeStream,
    Input::Range: PartialEq + crate::stream::Range,
{
    type Output = Input::Range;
    // Number of tokens already scanned by an interrupted partial parse.
    type PartialState = usize;

    #[inline]
    fn parse_partial(
        &mut self,
        input: &mut Input,
        to_consume: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        use crate::stream::Range;

        // Scans forward one token at a time, probing for the needle range at
        // each offset; on a hit, rewinds and returns everything before it.
        let len = self.0.len();
        let before = input.checkpoint();
        let mut first_stream_error = None;

        // Skip until the end of the last parse attempt
        ctry!(uncons_range(input, *to_consume));

        loop {
            let look_ahead_input = input.checkpoint();

            match input.uncons_range(len) {
                Ok(xs) => {
                    if xs == self.0 {
                        let distance = input.distance(&before) - len;
                        ctry!(input.reset(before).committed());

                        if let Ok(committed) = input.uncons_range(distance) {
                            if distance == 0 {
                                // Needle found at the very start: nothing consumed.
                                return PeekOk(committed);
                            } else {
                                *to_consume = 0;
                                return CommitOk(committed);
                            }
                        }

                        // We are guaranteed able to uncons to_consume characters here
                        // because we've already done it on look_ahead_input.
                        unreachable!();
                    } else {
                        // Reset the stream back to where it was when we entered the top of the loop
                        ctry!(input.reset(look_ahead_input).committed());

                        // Advance the stream by one token
                        if input.uncons().is_err() {
                            unreachable!();
                        }
                    }
                }
                Err(first_error) => {
                    // If we are unable to find a successful parse even after advancing with `uncons`
                    // below we must reset the stream to its state before the first error.
                    // If we don't we may try and match the range `::` against `:<EOF>` which would
                    // fail as only one `:` is present at this parse attempt. But when we later resume
                    // with more input we must start parsing again at the first time we errored so we
                    // can see the entire `::`
                    if first_stream_error.is_none() {
                        first_stream_error = Some((first_error, input.distance(&before)));
                    }

                    // Reset the stream back to where it was when we entered the top of the loop
                    ctry!(input.reset(look_ahead_input).committed());

                    // See if we can advance anyway
                    if input.uncons().is_err() {
                        let (first_error, first_error_distance) = first_stream_error.unwrap();

                        // Reset the stream
                        ctry!(input.reset(before).committed());
                        *to_consume = first_error_distance;

                        // Return the original error if uncons failed
                        return wrap_stream_error(input, first_error);
                    }
                }
            };
        }
    }
}

/// Zero-copy parser which reads a range of 0 or more tokens until `r` is found.
///
/// The range `r` will not be committed. If `r` is not found, the parser will
/// return an error.
///
/// [`repeat::take_until`][] is a non-`RangeStream` alternative.
///
/// [`repeat::take_until`]: ../../parser/repeat/fn.take_until.html
/// ```
/// # extern crate combine;
/// # use combine::parser::range::{range, take_until_range};
/// # use combine::*;
/// # fn main() {
/// let mut parser = take_until_range("\r\n");
/// let result = parser.parse("To: user@example.com\r\n");
/// assert_eq!(result, Ok(("To: user@example.com", "\r\n")));
/// let result = parser.parse("Hello, world\n");
/// assert!(result.is_err());
/// # }
/// ```
pub fn take_until_range<Input>(r: Input::Range) -> TakeUntilRange<Input>
where
    Input: RangeStream,
{
    TakeUntilRange(r)
}
/// Outcome of a [`TakeFn`] searcher applied to the currently buffered range.
#[derive(Debug, PartialEq)]
pub enum TakeRange {
    /// Found the pattern at this offset
    Found(usize),
    /// Did not find the pattern but the parser can skip ahead to this offset.
    NotFound(usize),
}

impl From<Option<usize>> for TakeRange {
    /// `Some(i)` is a hit at offset `i`; `None` means "no hit and nothing
    /// safe to skip yet", hence `NotFound(0)`.
    fn from(opt: Option<usize>) -> TakeRange {
        opt.map_or(TakeRange::NotFound(0), TakeRange::Found)
    }
}
/// Zero-copy parser driven by a searcher callback over the buffered range;
/// created by [`take_fn`].
pub struct TakeFn<F, Input> {
    searcher: F,
    _marker: PhantomData<fn(Input)>,
}

impl<Input, F, R> Parser<Input> for TakeFn<F, Input>
where
    F: FnMut(Input::Range) -> R,
    R: Into<TakeRange>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    type Output = Input::Range;
    // Offset already searched by a previous (interrupted) partial attempt.
    type PartialState = usize;

    parse_mode!(Input);
    #[inline]
    fn parse_mode<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        offset: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        let checkpoint = input.checkpoint();

        if mode.is_first() {
            *offset = 0;
        } else {
            // Resume: re-skip the prefix that was already searched.
            let _ = input.uncons_range(*offset);
        }

        match (self.searcher)(input.range()).into() {
            TakeRange::Found(i) => {
                // Rewind and consume everything up to (and including) the hit.
                ctry!(input.reset(checkpoint).committed());
                let result = uncons_range(input, *offset + i);
                if result.is_ok() {
                    *offset = 0;
                }
                result
            }
            TakeRange::NotFound(next_offset) => {
                *offset = next_offset;

                // Consume the whole buffered range just to obtain the
                // end-of-range position for the error, then rewind.
                let range = input.range();
                let _ = input.uncons_range(range.len());
                let position = input.position();
                ctry!(input.reset(checkpoint).committed());

                let err = Input::Error::from_error(position, StreamError::end_of_input());
                if !input.is_partial() && range.is_empty() {
                    // A complete stream with nothing left can never succeed.
                    PeekErr(err.into())
                } else {
                    // Partial streams may succeed once more input arrives.
                    CommitErr(err)
                }
            }
        }
    }
}

/// Searches the entire range using `searcher` and then consumes a range of `Some(n)`.
/// If `f` can not find anything in the range it must return `None/NotFound` which indicates an end of input error.
///
/// If partial parsing is used the `TakeRange` enum can be returned instead of `Option`. By
/// returning `TakeRange::NotFound(n)` it indicates that the input can skip ahead until `n`
/// when parsing is next resumed.
///
/// See [`take_until_bytes`](../byte/fn.take_until_bytes.html) for a usecase.
pub fn take_fn<F, R, Input>(searcher: F) -> TakeFn<F, Input>
where
    F: FnMut(Input::Range) -> R,
    R: Into<TakeRange>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    TakeFn {
        searcher,
        _marker: PhantomData,
    }
}
// `std` variant: conversion failures are reported through `StreamError::other`,
// which requires the error to implement `std::error::Error`.
#[cfg(feature = "std")]
parser! {
    /// Takes a parser which parses a `length` then extracts a range of that length and returns it.
    /// Commonly used in binary formats
    ///
    /// ```
    /// # use combine::parser::{byte::num::be_u16, range::length_prefix};
    /// # use combine::*;
    /// # fn main() {
    /// let mut input = Vec::new();
    /// input.extend_from_slice(&3u16.to_be_bytes());
    /// input.extend_from_slice(b"1234");
    ///
    /// let mut parser = length_prefix(be_u16());
    /// let result = parser.parse(&input[..]);
    /// assert_eq!(result, Ok((&b"123"[..], &b"4"[..])));
    /// # }
    /// ```
    pub fn length_prefix[Input, P](len: P)(Input) -> Input::Range
    where [
        Input: RangeStream,
        P: Parser<Input>,
        usize: TryFrom<P::Output>,
        <usize as TryFrom<P::Output>>::Error: StdError + Send + Sync + 'static,
    ]
    {
        len
            // Convert the parsed length into `usize`, failing the parse on overflow.
            .and_then(|u| {
                usize::try_from(u)
                    .map_err(StreamErrorFor::<Input>::other)
            })
            // `then_partial` so the `take` can resume correctly on partial streams.
            .then_partial(|&mut len| take(len))
    }
}
// `no_std` variant of `length_prefix`: without `std::error::Error` available,
// conversion failures are reported via `message_format` (needs only `Display`).
#[cfg(not(feature = "std"))]
parser! {
    /// Takes a parser which parses a `length` then extracts a range of that length and returns it.
    /// Commonly used in binary formats
    ///
    /// ```
    /// # use combine::parser::{byte::num::be_u16, range::length_prefix};
    /// # use combine::*;
    /// # fn main() {
    /// let mut input = Vec::new();
    /// input.extend_from_slice(&3u16.to_be_bytes());
    /// input.extend_from_slice(b"1234");
    ///
    /// let mut parser = length_prefix(be_u16());
    /// let result = parser.parse(&input[..]);
    /// assert_eq!(result, Ok((&b"123"[..], &b"4"[..])));
    /// # }
    /// ```
    pub fn length_prefix[Input, P](len: P)(Input) -> Input::Range
    where [
        Input: RangeStream,
        P: Parser<Input>,
        usize: TryFrom<P::Output>,
        <usize as TryFrom<P::Output>>::Error: fmt::Display + Send + Sync + 'static,
    ]
    {
        len
            // Convert the parsed length into `usize`, failing the parse on overflow.
            .and_then(|u| {
                usize::try_from(u)
                    .map_err(StreamErrorFor::<Input>::message_format)
            })
            // `then_partial` so the `take` can resume correctly on partial streams.
            .then_partial(|&mut len| take(len))
    }
}
#[cfg(test)]
mod tests {

    use crate::Parser;

    use super::*;

    #[test]
    fn take_while_test() {
        let result = take_while(|c: char| c.is_digit(10)).parse("123abc");
        assert_eq!(result, Ok(("123", "abc")));
        // Zero matches is still a success for `take_while`.
        let result = take_while(|c: char| c.is_digit(10)).parse("abc");
        assert_eq!(result, Ok(("", "abc")));
    }

    #[test]
    fn take_while1_test() {
        let result = take_while1(|c: char| c.is_digit(10)).parse("123abc");
        assert_eq!(result, Ok(("123", "abc")));
        // Unlike `take_while`, zero matches is an error here.
        let result = take_while1(|c: char| c.is_digit(10)).parse("abc");
        assert!(result.is_err());
    }

    #[test]
    fn range_string_no_char_boundary_error() {
        // A multi-byte char overlapping the requested range must fail
        // cleanly instead of panicking on a non-char-boundary slice.
        let mut parser = range("hello");
        let result = parser.parse("hell\u{00EE} world");
        assert!(result.is_err());
    }

    #[test]
    fn take_until_range_1() {
        let result = take_until_range("\"").parse("Foo baz bar quux\"");
        assert_eq!(result, Ok(("Foo baz bar quux", "\"")));
    }

    #[test]
    fn take_until_range_2() {
        // The needle (`===`) must not match its own proper prefix (`==`).
        let result = take_until_range("===").parse("if ((pointless_comparison == 3) === true) {");
        assert_eq!(
            result,
            Ok(("if ((pointless_comparison == 3) ", "=== true) {"))
        );
    }

    #[test]
    fn take_until_range_unicode_1() {
        let result = take_until_range("🦀")
            .parse("😃 Ferris the friendly rustacean 🦀 and his snake friend 🐍");
        assert_eq!(
            result,
            Ok((
                "😃 Ferris the friendly rustacean ",
                "🦀 and his snake friend 🐍"
            ))
        );
    }

    #[test]
    fn take_until_range_unicode_2() {
        let result = take_until_range("⁘⁙/⁘").parse("⚙️🛠️🦀=🏎️⁘⁙⁘⁘⁙/⁘⁘⁙/⁘");
        assert_eq!(result, Ok(("⚙️🛠️🦀=🏎️⁘⁙⁘", "⁘⁙/⁘⁘⁙/⁘")));
    }
}

549
vendor/combine/src/parser/regex.rs vendored Normal file
View File

@@ -0,0 +1,549 @@
//! Module containing regex parsers on streams returning ranges of `&str` or `&[u8]`.
//!
//! All regex parsers are overloaded on `&str` and `&[u8]` ranges and can take a `Regex` by value
//! or shared reference (`&`).
//!
//! Enabled using the `regex` feature (for `regex-0.2`) or the `regex-1` feature for `regex-1.0`.
//!
//! ```
//! use once_cell::sync::Lazy;
//! use regex::{bytes, Regex};
//! use combine::Parser;
//! use combine::parser::regex::{find_many, match_};
//!
//! fn main() {
//! let regex = bytes::Regex::new("[0-9]+").unwrap();
//! // Shared references to any regex works as well
//! assert_eq!(
//! find_many(&regex).parse(&b"123 456 "[..]),
//! Ok((vec![&b"123"[..], &b"456"[..]], &b" "[..]))
//! );
//! assert_eq!(
//! find_many(regex).parse(&b""[..]),
//! Ok((vec![], &b""[..]))
//! );
//!
//! static REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("[:alpha:]+").unwrap());
//! assert_eq!(
//! match_(&*REGEX).parse("abc123"),
//! Ok(("abc123", "abc123"))
//! );
//! }
//! ```
use std::{iter::FromIterator, marker::PhantomData};
use crate::{
error::{
ParseError,
ParseResult::{self, *},
StreamError, Tracked,
},
parser::range::take,
stream::{RangeStream, StreamOnce},
Parser,
};
/// Collector that keeps only the first element yielded by an iterator.
/// Used to pull a single match out of a regex match iterator without
/// allocating a collection.
struct First<T>(Option<T>);

impl<A> FromIterator<A> for First<A> {
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = A>,
    {
        let mut items = iter.into_iter();
        Self(items.next())
    }
}
/// Abstraction over a single regex match (`regex::Match` or
/// `regex::bytes::Match`), exposing its end offset and matched range.
pub trait MatchFind {
    type Range;
    /// Byte offset one past the end of the match within the searched range.
    fn end(&self) -> usize;
    /// The matched sub-range itself.
    fn as_match(&self) -> Self::Range;
}

/// Abstraction over a regex usable on `&str` or `&[u8]` ranges; implemented
/// for `regex::Regex` / `regex::bytes::Regex` and their references.
pub trait Regex<Range> {
    /// Whether the regex matches anywhere in `range`.
    fn is_match(&self, range: Range) -> bool;
    /// Collects all matches in `range`; the `usize` is the end offset of the
    /// last match found.
    fn find_iter<F>(&self, range: Range) -> (usize, F)
    where
        F: FromIterator<Range>;
    /// Collects capture groups per match; the `usize` is the end offset of the
    /// last overall match.
    fn captures<F, G>(&self, range: Range) -> (usize, G)
    where
        F: FromIterator<Range>,
        G: FromIterator<F>;
    /// The regex's textual pattern, used in error messages.
    fn as_str(&self) -> &str;
}
// Forwarding impl so a shared reference to any regex works wherever an owned
// regex does (e.g. a `static`/`Lazy` regex borrowed at the call site).
impl<'a, R, Range> Regex<Range> for &'a R
where
    R: Regex<Range>,
{
    fn is_match(&self, range: Range) -> bool {
        (**self).is_match(range)
    }
    fn find_iter<F>(&self, range: Range) -> (usize, F)
    where
        F: FromIterator<Range>,
    {
        (**self).find_iter(range)
    }
    fn captures<F, G>(&self, range: Range) -> (usize, G)
    where
        F: FromIterator<Range>,
        G: FromIterator<F>,
    {
        (**self).captures(range)
    }
    fn as_str(&self) -> &str {
        (**self).as_str()
    }
}
/// Collects the matched ranges from a match iterator while tracking the end
/// offset of the last match seen (0 when there are no matches). The offset
/// tells the calling parser how much input the matches covered.
fn find_iter<'a, Input, F>(iterable: Input) -> (usize, F)
where
    Input: IntoIterator,
    Input::Item: MatchFind,
    F: FromIterator<<Input::Item as MatchFind>::Range>,
{
    let mut end = 0;
    let value = iterable
        .into_iter()
        .map(|m| {
            // Side effect inside `map`: remember where the latest match ended.
            end = m.end();
            m.as_match()
        })
        .collect();
    (end, value)
}
// Glue implementing the crate-local `MatchFind`/`Regex` traits for the
// `regex` crate's string and byte flavours; only compiled with the
// `regex` feature.
#[cfg(feature = "regex")]
mod regex {
    pub extern crate regex;

    use std::iter::FromIterator;

    use super::{find_iter, MatchFind, Regex};

    pub use self::regex::*;

    impl<'t> MatchFind for regex::Match<'t> {
        type Range = &'t str;
        fn end(&self) -> usize {
            regex::Match::end(self)
        }
        fn as_match(&self) -> Self::Range {
            self.as_str()
        }
    }

    impl<'t> MatchFind for regex::bytes::Match<'t> {
        type Range = &'t [u8];
        fn end(&self) -> usize {
            regex::bytes::Match::end(self)
        }
        fn as_match(&self) -> Self::Range {
            self.as_bytes()
        }
    }

    impl<'a> Regex<&'a str> for regex::Regex {
        fn is_match(&self, range: &'a str) -> bool {
            regex::Regex::is_match(self, range)
        }
        fn find_iter<F>(&self, range: &'a str) -> (usize, F)
        where
            F: FromIterator<&'a str>,
        {
            find_iter(regex::Regex::find_iter(self, range))
        }
        fn captures<F, G>(&self, range: &'a str) -> (usize, G)
        where
            F: FromIterator<&'a str>,
            G: FromIterator<F>,
        {
            // Track the end of the last overall match while collecting, per
            // capture set, the full match followed by each matched group.
            let mut end = 0;
            let value = regex::Regex::captures_iter(self, range)
                .map(|captures| {
                    let mut captures_iter = captures.iter();
                    // The first group is the match on the entire regex
                    let first_match = captures_iter.next().unwrap().unwrap();
                    end = first_match.end();
                    Some(Some(first_match))
                        .into_iter()
                        .chain(captures_iter)
                        // Non-participating groups (`None`) are skipped.
                        .filter_map(|match_| match_.map(|m| m.as_match()))
                        .collect()
                })
                .collect();
            (end, value)
        }
        fn as_str(&self) -> &str {
            regex::Regex::as_str(self)
        }
    }

    impl<'a> Regex<&'a [u8]> for regex::bytes::Regex {
        fn is_match(&self, range: &'a [u8]) -> bool {
            regex::bytes::Regex::is_match(self, range)
        }
        fn find_iter<F>(&self, range: &'a [u8]) -> (usize, F)
        where
            F: FromIterator<&'a [u8]>,
        {
            find_iter(regex::bytes::Regex::find_iter(self, range))
        }
        fn captures<F, G>(&self, range: &'a [u8]) -> (usize, G)
        where
            F: FromIterator<&'a [u8]>,
            G: FromIterator<F>,
        {
            // Mirrors the `&str` impl above, for byte regexes.
            let mut end = 0;
            let value = regex::bytes::Regex::captures_iter(self, range)
                .map(|captures| {
                    let mut captures_iter = captures.iter();
                    // The first group is the match on the entire regex
                    let first_match = captures_iter.next().unwrap().unwrap();
                    end = first_match.end();
                    Some(Some(first_match))
                        .into_iter()
                        .chain(captures_iter)
                        .filter_map(|match_| match_.map(|m| m.as_match()))
                        .collect()
                })
                .collect();
            (end, value)
        }
        fn as_str(&self) -> &str {
            regex::bytes::Regex::as_str(self)
        }
    }
}
pub struct Match<R, Input>(R, PhantomData<Input>);
impl<'a, Input, R> Parser<Input> for Match<R, Input>
where
    R: Regex<Input::Range>,
    Input: RangeStream,
{
    type Output = Input::Range;
    type PartialState = ();
    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        // Pure lookahead: succeed with the *entire* remaining range without
        // consuming anything, fail (also without consuming) otherwise.
        if self.0.is_match(input.range()) {
            PeekOk(input.range())
        } else {
            PeekErr(Input::Error::empty(input.position()).into())
        }
    }
    fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Report the regex in `/pattern/` form as the expected input.
        error.error.add(StreamError::expected_format(format_args!(
            "/{}/",
            self.0.as_str()
        )))
    }
}
/// Succeeds with the *entire* input range whenever `regex` matches somewhere
/// in the input. No input is ever consumed.
///
/// ```
/// extern crate regex;
/// extern crate combine;
/// use regex::Regex;
/// use combine::Parser;
/// use combine::parser::regex::match_;
///
/// fn main() {
///     let regex = Regex::new("[:alpha:]+").unwrap();
///     assert_eq!(
///         match_(&regex).parse("abc123"),
///         Ok(("abc123", "abc123"))
///     );
/// }
/// ```
pub fn match_<R, Input>(regex: R) -> Match<R, Input>
where
    R: Regex<Input::Range>,
    Input: RangeStream,
{
    Match(regex, PhantomData)
}
// Parser returned by `find`: yields the first regex match and consumes the
// input up to the end of that match.
#[derive(Clone)]
pub struct Find<R, Input>(R, PhantomData<fn() -> Input>);
impl<'a, Input, R> Parser<Input> for Find<R, Input>
where
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    type Output = Input::Range;
    type PartialState = ();
    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        // `First` keeps only the first match; `end` is its end offset.
        let (end, First(value)) = self.0.find_iter(input.range());
        match value {
            // Consume up to the end of the match, then yield the match.
            Some(value) => take(end).parse_lazy(input).map(|_| value),
            None => PeekErr(Input::Error::empty(input.position()).into()),
        }
    }
    fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        error.error.add(StreamError::expected_format(format_args!(
            "/{}/",
            self.0.as_str()
        )))
    }
}
/// Runs `regex` with `find` against the input and yields the first match,
/// consuming the input up to (and including) the end of that match.
///
/// ```
/// extern crate regex;
/// extern crate combine;
/// use regex::Regex;
/// use combine::Parser;
/// use combine::parser::regex::find;
///
/// fn main() {
///     let mut digits = find(Regex::new("^[0-9]+").unwrap());
///     assert_eq!(digits.parse("123 456 "), Ok(("123", " 456 ")));
///     assert!(
///         digits.parse("abc 123 456 ").is_err());
///
///     let mut digits2 = find(Regex::new("[0-9]+").unwrap());
///     assert_eq!(digits2.parse("123 456 "), Ok(("123", " 456 ")));
///     assert_eq!(digits2.parse("abc 123 456 "), Ok(("123", " 456 ")));
/// }
/// ```
pub fn find<R, Input>(regex: R) -> Find<R, Input>
where
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    Find(regex, PhantomData)
}
// Parser returned by `find_many`: yields all regex matches and consumes the
// input up to the end of the last one (succeeds with an empty collection
// and no consumption when there are no matches).
#[derive(Clone)]
pub struct FindMany<F, R, Input>(R, PhantomData<fn() -> (Input, F)>);
impl<'a, Input, F, R> Parser<Input> for FindMany<F, R, Input>
where
    F: FromIterator<Input::Range>,
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    type Output = F;
    type PartialState = ();
    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        // `end` is 0 when nothing matched, so `take(0)` consumes nothing.
        let (end, value) = self.0.find_iter(input.range());
        take(end).parse_lazy(input).map(|_| value)
    }
    fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        error.error.add(StreamError::expected_format(format_args!(
            "/{}/",
            self.0.as_str()
        )))
    }
}
/// Runs `regex` with `find_iter` against the input, collecting every match
/// into an `F: FromIterator<Input::Range>` and consuming the input through
/// the end of the last match (nothing is consumed when there are no matches).
///
/// ```
/// extern crate regex;
/// extern crate combine;
/// use regex::Regex;
/// use regex::bytes;
/// use combine::Parser;
/// use combine::parser::regex::find_many;
///
/// fn main() {
///     let mut digits = find_many(Regex::new("[0-9]+").unwrap());
///     assert_eq!(digits.parse("123 456 "), Ok((vec!["123", "456"], " ")));
///     assert_eq!(digits.parse("abc 123 456 "), Ok((vec!["123", "456"], " ")));
///     assert_eq!(digits.parse("abc"), Ok((vec![], "abc")));
/// }
/// ```
pub fn find_many<F, R, Input>(regex: R) -> FindMany<F, R, Input>
where
    F: FromIterator<Input::Range>,
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    FindMany(regex, PhantomData)
}
// Parser returned by `captures`: yields the capture groups of the first
// match and consumes input up to the end of that match.
#[derive(Clone)]
pub struct Captures<F, R, Input>(R, PhantomData<fn() -> (Input, F)>);
impl<'a, Input, F, R> Parser<Input> for Captures<F, R, Input>
where
    F: FromIterator<Input::Range>,
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    type Output = F;
    type PartialState = ();
    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        // `First` keeps only the first match's capture collection.
        let (end, First(value)) = self.0.captures(input.range());
        match value {
            Some(value) => take(end).parse_lazy(input).map(|_| value),
            None => PeekErr(Input::Error::empty(input.position()).into()),
        }
    }
    fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        error.error.add(StreamError::expected_format(format_args!(
            "/{}/",
            self.0.as_str()
        )))
    }
}
/// Runs `regex` with `captures_iter` against the input, yielding the capture
/// groups of the *first* match (group 0 — the whole match — first) and
/// consuming the input up to the end of that match.
///
/// ```
/// extern crate regex;
/// extern crate combine;
/// use regex::Regex;
/// use combine::Parser;
/// use combine::parser::regex::captures;
///
/// fn main() {
///     let mut fields = captures(Regex::new("([a-z]+):([0-9]+)").unwrap());
///     assert_eq!(
///         fields.parse("test:123 field:456 "),
///         Ok((vec!["test:123", "test", "123"],
///             " field:456 "
///         ))
///     );
///     assert_eq!(
///         fields.parse("test:123 :456 "),
///         Ok((vec!["test:123", "test", "123"],
///             " :456 "
///         ))
///     );
/// }
/// ```
pub fn captures<F, R, Input>(regex: R) -> Captures<F, R, Input>
where
    F: FromIterator<Input::Range>,
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    Captures(regex, PhantomData)
}
// Parser returned by `captures_many`: yields the capture groups of every
// match and consumes input up to the end of the last match.
#[derive(Clone)]
pub struct CapturesMany<F, G, R, Input>(R, PhantomData<fn() -> (Input, F, G)>);
impl<'a, Input, F, G, R> Parser<Input> for CapturesMany<F, G, R, Input>
where
    F: FromIterator<Input::Range>,
    G: FromIterator<F>,
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    type Output = G;
    type PartialState = ();
    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        // `end` is 0 when nothing matched, so `take(0)` consumes nothing.
        let (end, value) = self.0.captures(input.range());
        take(end).parse_lazy(input).map(|_| value)
    }
    fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
        error.error.add(StreamError::expected_format(format_args!(
            "/{}/",
            self.0.as_str()
        )))
    }
}
/// Runs `regex` with `captures_iter` against the input, collecting the
/// capture groups of every match (each match's groups into an
/// `F: FromIterator<Input::Range>`, all matches into a `G`) and consuming the
/// input through the end of the last match.
///
/// ```
/// extern crate regex;
/// extern crate combine;
/// use regex::Regex;
/// use combine::Parser;
/// use combine::parser::regex::captures_many;
///
/// fn main() {
///     let mut fields = captures_many(Regex::new("([a-z]+):([0-9]+)").unwrap());
///     assert_eq!(
///         fields.parse("test:123 field:456 "),
///         Ok((vec![vec!["test:123", "test", "123"],
///                  vec!["field:456", "field", "456"]],
///             " "
///         ))
///     );
///     assert_eq!(
///         fields.parse("test:123 :456 "),
///         Ok((vec![vec!["test:123", "test", "123"]],
///             " :456 "
///         ))
///     );
/// }
/// ```
pub fn captures_many<F, G, R, Input>(regex: R) -> CapturesMany<F, G, R, Input>
where
    F: FromIterator<Input::Range>,
    G: FromIterator<F>,
    R: Regex<Input::Range>,
    Input: RangeStream,
    Input::Range: crate::stream::Range,
{
    CapturesMany(regex, PhantomData)
}
#[cfg(test)]
mod tests {
    use regex::Regex;
    use crate::{parser::regex::find, Parser};
    // Anchored vs unanchored `find`: the anchored pattern must match at the
    // start of the input, the unanchored one skips leading non-matching text.
    #[test]
    fn test() {
        let mut digits = find(Regex::new("^[0-9]+").unwrap());
        assert_eq!(digits.parse("123 456 "), Ok(("123", " 456 ")));
        assert!(digits.parse("abc 123 456 ").is_err());
        let mut digits2 = find(Regex::new("[0-9]+").unwrap());
        assert_eq!(digits2.parse("123 456 "), Ok(("123", " 456 ")));
        assert_eq!(digits2.parse("abc 123 456 "), Ok(("123", " 456 ")));
    }
}

1620
vendor/combine/src/parser/repeat.rs vendored Normal file

File diff suppressed because it is too large Load Diff

907
vendor/combine/src/parser/sequence.rs vendored Normal file
View File

@@ -0,0 +1,907 @@
//! Combinators which take multiple parsers and applies them one after another.
use crate::{
error::{
ParseError,
ParseResult::{self, *},
StreamError, Tracked,
},
lib::marker::PhantomData,
parser::{
combinator::{ignore, Ignore, Map},
ParseMode,
},
ErrorOffset, Parser, Stream, StreamOnce,
};
// Counts the identifiers it is given (expands to a constant expression),
// used to know how many parsers a tuple combinator contains.
macro_rules! count {
    () => { 0 };
    ($f: ident) => { 1 };
    ($f: ident, $($rest: ident),+) => { 1 + count!($($rest),*) };
}
// Partial-parse state for one element of a parser tuple: the element's own
// partial state plus its output once it has completed.
#[doc(hidden)]
pub struct SequenceState<T, U> {
    // `Some` once the sub-parser has produced its output; `None` before.
    pub value: Option<T>,
    // The sub-parser's own partial state, used to resume a partial parse.
    pub state: U,
}
// Manual impl: only `U: Default` is needed since `value` starts as `None`;
// deriving would also (needlessly) require `T: Default`.
impl<T, U: Default> Default for SequenceState<T, U> {
    fn default() -> Self {
        SequenceState {
            value: None,
            state: U::default(),
        }
    }
}
impl<T, U> SequenceState<T, U>
where
    U: Default,
{
    // SAFETY contract: the caller must guarantee `value` is `Some`; the
    // `None` arm is undefined behavior (`unreachable_unchecked`). The tuple
    // parser upholds this by setting `value` before calling this.
    unsafe fn unwrap_value(&mut self) -> T {
        match self.value.take() {
            Some(t) => t,
            None => core::hint::unreachable_unchecked(),
        }
    }
}
// Expands to the last identifier in a comma-separated list, used to pick the
// final parser of a tuple.
macro_rules! last_ident {
    ($id: ident) => { $id };
    ($id: ident, $($rest: ident),+) => { last_ident!($($rest),+) };
}
// Error-attribution helper for the tuple parsers, called once per sub-parser
// (in order) after a failure. `i` is the current parser's index (advanced on
// every call); `first_empty_parser` appears to be 1 + the index of the last
// parser that committed input (0 when nothing was committed) — see the
// `tuple_parser!` expansion. `inner_offset` is the offset returned by the
// failing parser. Returns `false` to stop the caller's loop once the parser
// that actually produced the error has been handled, `true` to continue.
fn add_sequence_error<Input>(
    i: &mut usize,
    first_empty_parser: usize,
    inner_offset: ErrorOffset,
    err: &mut Tracked<Input::Error>,
    parser: &mut impl Parser<Input>,
) -> bool
where
    Input: Stream,
{
    if *i + 1 == first_empty_parser {
        Parser::add_committed_expected_error(parser, err);
    }
    if *i >= first_empty_parser {
        if err.offset <= ErrorOffset(1) {
            // We reached the last parser we need to add errors to (and the
            // parser that actually returned the error), use the returned
            // offset for that parser.
            err.offset = inner_offset;
        }
        Parser::add_error(parser, err);
        if err.offset <= ErrorOffset(1) {
            return false;
        }
    }
    // Skip past this parser's (possibly compound) error-offset budget.
    err.offset = ErrorOffset(err.offset.0.saturating_sub(Parser::parser_count(parser).0));
    *i += 1;
    true
}
// Generates a `Parser` impl for a tuple of parsers `($h, $($id),*)` together
// with its partial-state struct `$partial_state`. The parsers run in order;
// the tuple commits iff any element committed input. On failure, error
// attribution is delegated to `add_sequence_error` via `add_errors`.
macro_rules! tuple_parser {
    ($partial_state: ident; $h: ident $(, $id: ident)*) => {
        #[allow(non_snake_case)]
        #[derive(Default)]
        pub struct $partial_state < $h $(, $id )* > {
            pub $h: $h,
            $(
                pub $id: $id,
            )*
            #[allow(dead_code)]
            offset: u8,
            _marker: PhantomData <( $h, $( $id),* )>,
        }
        #[allow(non_snake_case)]
        impl<$h $(, $id)*> $partial_state<$h $(, $id)*> {
            #[allow(dead_code)]
            fn add_errors<Input>(
                input: &mut Input,
                mut err: Tracked<Input::Error>,
                first_empty_parser: usize,
                offset: u8,
                $h: &mut $h $(, $id : &mut $id )*
            ) -> ParseResult<($h::Output, $($id::Output),*), <Input as StreamOnce>::Error>
            where Input: Stream,
                $h: Parser<Input>,
                $($id: Parser<Input>),*
            {
                let inner_offset = err.offset;
                err.offset = ErrorOffset(offset);
                if first_empty_parser != 0 {
                    // Input was committed before the failure: peek at the
                    // unexpected token for the error message, then walk the
                    // parsers to attach expected-errors.
                    if let Ok(t) = input.uncons() {
                        err.error.add(StreamError::unexpected_token(t));
                    }
                    #[allow(unused_assignments)]
                    let mut i = 0;
                    loop {
                        if !add_sequence_error(&mut i, first_empty_parser, inner_offset, &mut err, $h) {
                            break;
                        }
                        $(
                            if !add_sequence_error(&mut i, first_empty_parser, inner_offset, &mut err, $id) {
                                break;
                            }
                        )*
                        break;
                    }
                    CommitErr(err.error)
                } else {
                    PeekErr(err)
                }
            }
        }
        #[allow(non_snake_case)]
        impl <Input: Stream, $h:, $($id:),*> Parser<Input> for ($h, $($id),*)
            where Input: Stream,
                  $h: Parser<Input>,
                  $($id: Parser<Input>),*
        {
            type Output = ($h::Output, $($id::Output),*);
            type PartialState = $partial_state<
                SequenceState<$h::Output, $h::PartialState>
                $(, SequenceState<$id::Output, $id::PartialState>)*
            >;
            parse_mode!(Input);
            #[inline]
            fn parse_mode_impl<MODE>(
                &mut self,
                mut mode: MODE,
                input: &mut Input,
                state: &mut Self::PartialState,
            ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
            where
                MODE: ParseMode,
            {
                let (ref mut $h, $(ref mut $id),*) = *self;
                let mut first_empty_parser = 0;
                #[allow(unused_mut)]
                let mut current_parser = 0;
                #[allow(unused_macros)]
                macro_rules! add_errors {
                    ($err: ident, $offset: expr) => {
                        $partial_state::add_errors(
                            input, $err, first_empty_parser, $offset, $h, $($id),*
                        )
                    }
                }
                // First element: run (or resume) it, record whether it
                // committed, and stash its output in the partial state.
                if mode.is_first() || state.$h.value.is_none() {
                    let temp = match $h.parse_mode(mode, input, &mut state.$h.state) {
                        CommitOk(x) => {
                            first_empty_parser = current_parser + 1;
                            x
                        }
                        PeekErr(err) => return PeekErr(err),
                        CommitErr(err) => return CommitErr(err),
                        PeekOk(x) => {
                            x
                        }
                    };
                    state.offset = $h.parser_count().0.saturating_add(1);
                    // SAFETY: must be set to avoid UB below when unwrapping
                    state.$h.value = Some(temp);
                    // Once we have successfully parsed the partial input we may resume parsing in
                    // "first mode"
                    mode.set_first();
                }
                $(
                    if mode.is_first() || state.$id.value.is_none() {
                        current_parser += 1;
                        let before = input.checkpoint();
                        let temp = match $id.parse_mode(mode, input, &mut state.$id.state) {
                            CommitOk(x) => {
                                first_empty_parser = current_parser + 1;
                                x
                            }
                            PeekErr(err) => {
                                // Roll back this element's peek-consumption
                                // before attributing the error.
                                if let Err(err) = input.reset(before) {
                                    return if first_empty_parser != 0 {
                                        CommitErr(err.into())
                                    } else {
                                        PeekErr(err.into())
                                    };
                                }
                                return add_errors!(err, state.offset)
                            }
                            CommitErr(err) => return CommitErr(err),
                            PeekOk(x) => {
                                x
                            }
                        };
                        state.offset = state.offset.saturating_add($id.parser_count().0);
                        // SAFETY: must be set to avoid UB below when unwrapping
                        state.$id.value = Some(temp);
                        // Once we have successfully parsed the partial input we may resume parsing in
                        // "first mode"
                        mode.set_first();
                    }
                )*
                // SAFETY: requires both $h and $id to be set, see previous SAFETY comments
                let value = unsafe { (state.$h.unwrap_value(), $(state.$id.unwrap_value()),*) };
                if first_empty_parser != 0 {
                    CommitOk(value)
                } else {
                    PeekOk(value)
                }
            }
            #[inline]
            fn parser_count(&self) -> ErrorOffset {
                let (ref $h, $(ref $id),*) = *self;
                ErrorOffset($h.parser_count().0 $( + $id.parser_count().0)*)
            }
            #[inline]
            fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
                let (ref mut $h, $(ref mut $id),*) = *self;
                let prev = errors.offset;
                $h.add_error(errors);
                if errors.offset <= ErrorOffset(1) {
                    errors.offset = ErrorOffset(
                        errors.offset.0.saturating_sub(1)
                    );
                    return;
                }
                if errors.offset == prev {
                    errors.offset = ErrorOffset(errors.offset.0.saturating_sub($h.parser_count().0));
                }
                #[allow(dead_code)]
                const LAST: usize = count!($($id),*);
                #[allow(unused_mut, unused_variables)]
                let mut i = 0;
                $(
                    i += 1;
                    let prev = errors.offset;
                    $id.add_error(errors);
                    if errors.offset <= ErrorOffset(1) {
                        errors.offset = ErrorOffset(
                            errors.offset.0.saturating_sub(1)
                        );
                        return;
                    }
                    if i != LAST && errors.offset == prev {
                        errors.offset = ErrorOffset(
                            errors.offset.0.saturating_sub($id.parser_count().0)
                        );
                    }
                )*
            }
            fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
                #[allow(unused_variables)]
                let (ref mut $h, $(ref mut $id),*) = *self;
                last_ident!($h $(, $id)*).add_committed_expected_error(errors)
            }
        }
    }
}
// Implement `Parser` for tuples of 1 up to 20 parsers. Note that the larger
// instantiations deliberately skip `O` in their type-parameter alphabets.
tuple_parser!(PartialState1; A);
tuple_parser!(PartialState2; A, B);
tuple_parser!(PartialState3; A, B, C);
tuple_parser!(PartialState4; A, B, C, D);
tuple_parser!(PartialState5; A, B, C, D, E);
tuple_parser!(PartialState6; A, B, C, D, E, F);
tuple_parser!(PartialState7; A, B, C, D, E, F, G);
tuple_parser!(PartialState8; A, B, C, D, E, F, G, H);
tuple_parser!(PartialState9; A, B, C, D, E, F, G, H, I);
tuple_parser!(PartialState10; A, B, C, D, E, F, G, H, I, J);
tuple_parser!(PartialState11; A, B, C, D, E, F, G, H, I, J, K);
tuple_parser!(PartialState12; A, B, C, D, E, F, G, H, I, J, K, L);
tuple_parser!(PartialState13; A, B, C, D, E, F, G, H, I, J, K, L, M);
tuple_parser!(PartialState14; A, B, C, D, E, F, G, H, I, J, K, L, M, N);
tuple_parser!(PartialState15; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P);
tuple_parser!(PartialState16; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q);
tuple_parser!(PartialState17; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R);
tuple_parser!(PartialState18; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R, S);
tuple_parser!(PartialState19; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R, S, T);
tuple_parser!(PartialState20; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R, S, T, U);
// `struct_parser!` helper: extracts just the parser expressions from a
// `field: parser` list (ignoring field names and `_` placeholders) and
// assembles them into a tuple expression, accumulating into `$($tt)*`.
#[macro_export]
#[doc(hidden)]
macro_rules! seq_parser_expr {
    (; $($tt: tt)*) => {
        ( $($tt)* )
    };
    ( (_ : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
        $crate::seq_parser_expr!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
    };
    ( ($first_field: ident : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
        $crate::seq_parser_expr!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
    };
    ( (_ : $first_parser: expr ); $($tt: tt)*) => {
        ( $($tt)* $first_parser, )
    };
    ( ($first_field: ident : $first_parser: expr, ); $($tt: tt)*) => {
        $crate::seq_parser_expr!(; $($tt)* $first_parser,)
    };
    ( (_ : $first_parser: expr, ); $($tt: tt)*) => {
        ( $($tt)* $first_parser, )
    };
    ( ($first_field: ident : $first_parser: expr ); $($tt: tt)*) => {
        $crate::seq_parser_expr!(; $($tt)* $first_parser,)
    };
}
// `struct_parser!` helper: builds the tuple *pattern* that destructures the
// tuple parser's output — field names become bindings, `_` entries stay `_`.
#[macro_export]
#[doc(hidden)]
macro_rules! seq_parser_pattern {
    (; $($tt: tt)*) => {
        ( $($tt)* )
    };
    ( (_ : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
        $crate::seq_parser_pattern!( ( $($remaining)+ ) ; $($tt)* _, )
    };
    ( ($first_field: ident : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
        $crate::seq_parser_pattern!( ( $($remaining)+ ) ; $($tt)* $first_field, )
    };
    ( ( _ : $first_parser: expr ); $($tt: tt)*) => {
        $crate::seq_parser_pattern!(; $($tt)* _, )
    };
    ( ($first_field: ident : $first_parser: expr ); $($tt: tt)*) => {
        $crate::seq_parser_pattern!(; $($tt)* $first_field,)
    };
    ( ( _ : $first_parser: expr, ); $($tt: tt)*) => {
        $crate::seq_parser_pattern!(; $($tt)* _, )
    };
    ( ($first_field: ident : $first_parser: expr, ); $($tt: tt)*) => {
        $crate::seq_parser_pattern!(; $($tt)* $first_field,)
    };
}
// `struct_parser!` helper: builds the struct literal `$name { field: field, … }`
// from the bindings introduced by `seq_parser_pattern!`, skipping `_` entries.
#[macro_export]
#[doc(hidden)]
macro_rules! seq_parser_impl {
    (; $name: ident $($tt: tt)*) => {
        $name { $($tt)* }
    };
    ( (_ : $first_parser: expr, $($remaining: tt)+ ); $name: ident $($tt: tt)*) => {
        $crate::seq_parser_impl!( ( $($remaining)+ ) ; $name $($tt)* )
    };
    ( ($first_field: ident : $first_parser: expr, $($remaining: tt)+ );
        $name: ident $($tt: tt)*) =>
    {
        $crate::seq_parser_impl!( ( $($remaining)+ ) ; $name $($tt)* $first_field: $first_field, )
    };
    ( ( _ : $first_parser: expr ); $name: ident $($tt: tt)*) => {
        $crate::seq_parser_impl!( ; $name $($tt)* )
    };
    ( ($first_field: ident : $first_parser: expr ); $name: ident $($tt: tt)*) => {
        $crate::seq_parser_impl!(; $name $($tt)* $first_field: $first_field,)
    };
    ( ( _ : $first_parser: expr, ); $name: ident $($tt: tt)*) => {
        $crate::seq_parser_impl!(; $name $($tt)*)
    };
    ( ($first_field: ident : $first_parser: expr, ); $name: ident $($tt: tt)*) => {
        $crate::seq_parser_impl!(; $name $($tt)* $first_field: $first_field,)
    };
}
// Tuple-struct variant helper: walks the parser list in lockstep with the
// tuple elements `t.0, t.1, …`, keeping the elements for real parsers and
// dropping the ones matched by `_ :` placeholders, then calls `$name(…)`.
#[macro_export]
#[doc(hidden)]
macro_rules! seq_tuple_extract {
    (; ; $name: ident ; $($arg: expr),* $(,)? ) => {
        $name( $($arg,)* )
    };
    ( (_ : $first_parser: expr, $($remaining: tt)+ ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
        $crate::seq_tuple_extract!( ( $($remaining)+ ); ( $($arg),* ) ; $($tt)* )
    };
    ( ($first_parser: expr, $($remaining: tt)+ ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
        $crate::seq_tuple_extract!( ( $($remaining)+ ) ; ( $($arg),* ) ; $($tt)* $first_arg, )
    };
    ( (_ : $first_parser: expr $(,)? ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
        $crate::seq_tuple_extract!(; ; $($tt)*)
    };
    ( ($first_parser: expr $(,)? ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
        $crate::seq_tuple_extract!(; ; $($tt)* $first_arg)
    };
}
// Tuple-struct variant helper: collects every parser expression (including
// the `_ :` placeholders, whose outputs are discarded later by
// `seq_tuple_extract!`) into a single parser tuple.
#[macro_export]
#[doc(hidden)]
macro_rules! seq_tuple_parser_impl {
    (; $($tt: tt)*) => {
        ($($tt)*)
    };
    ( (_ : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
        $crate::seq_tuple_parser_impl!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
    };
    ( ($first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
        $crate::seq_tuple_parser_impl!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
    };
    ( (_ : $first_parser: expr $(,)? ); $($tt: tt)*) => {
        $crate::seq_tuple_parser_impl!(; $($tt)* $first_parser, )
    };
    ( ($first_parser: expr $(,)? ); $($tt: tt)*) => {
        $crate::seq_tuple_parser_impl!(; $($tt)* $first_parser, )
    };
}
/// Sequences multiple parsers and builds a struct out of them.
///
/// ```
/// use combine::{Parser, between, from_str, many, struct_parser, token};
/// use combine::parser::range::take_while1;
/// use combine::parser::byte::{letter, spaces};
///
/// #[derive(Debug, PartialEq)]
/// struct Point(u32, u32);
///
/// #[derive(Debug, PartialEq)]
/// struct Field {
///     name: Vec<u8>,
///     value: Vec<u8>,
///     point: Point,
/// }
/// fn main() {
///     let num = || from_str(take_while1(|b: u8| b >= b'0' && b <= b'9'));
///     let spaced = |b| between(spaces(), spaces(), token(b));
///     let mut parser = struct_parser!{
///         Field {
///             name: many(letter()),
///             // `_` fields are ignored when building the struct
///             _: spaced(b':'),
///             value: many(letter()),
///             _: spaced(b':'),
///             point: struct_parser!(Point(num(), _: spaced(b','), num())),
///         }
///     };
///     assert_eq!(
///         parser.parse(&b"test: data: 123 , 4"[..]),
///         Ok((
///             Field {
///                 name: b"test"[..].to_owned(),
///                 value: b"data"[..].to_owned(),
///                 point: Point(123, 4),
///             },
///             &b""[..]
///         )),
///     );
/// }
/// ```
#[macro_export]
macro_rules! struct_parser {
    // Named-field form: run the parsers as a tuple, destructure the tuple
    // output into the field names, and build the struct literal.
    ($name: ident { $($tt: tt)* }) => {
        $crate::seq_parser_expr!( ( $($tt)* ); )
            .map(|$crate::seq_parser_pattern!( ( $($tt)* ); )|
                $crate::seq_parser_impl!(( $($tt)* ); $name )
            )
    };
    // Tuple-struct form: limited to 15 elements by the fixed `t.0 ..= t.14`
    // list handed to `seq_tuple_extract!`.
    ($name: ident ( $($arg: tt)* )) => {
        $crate::seq_tuple_parser_impl!( ( $($arg)* ) ; )
            .map(|t|
                $crate::seq_tuple_extract!(
                    ( $($arg)* );
                    (t.0, t.1, t.2, t.3, t.4, t.5, t.6, t.7, t.8, t.9, t.10, t.11, t.12, t.13, t.14);
                    $name ;
                )
            )
    }
}
// Parser returned by `with`/`Parser::with`: runs both parsers in sequence
// and keeps only the second parser's output.
#[derive(Copy, Clone)]
pub struct With<P1, P2>((Ignore<P1>, P2));
impl<Input, P1, P2> Parser<Input> for With<P1, P2>
where
    Input: Stream,
    P1: Parser<Input>,
    P2: Parser<Input>,
{
    type Output = P2::Output;
    // Delegates state handling to the inner `(Ignore<P1>, P2)` tuple parser.
    type PartialState = <(Ignore<P1>, P2) as Parser<Input>>::PartialState;
    #[inline]
    fn parse_lazy(
        &mut self,
        input: &mut Input,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
        self.0.parse_lazy(input).map(|(_, b)| b)
    }
    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        self.0.parse_mode(mode, input, state).map(|(_, b)| b)
    }
    forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
}
/// Free-function form of [`p1.with(p2)`]: runs `p1`, discards its output,
/// then runs `p2` and yields `p2`'s output.
///
/// [`p1.with(p2)`]: ../trait.Parser.html#method.with
pub fn with<Input, P1, P2>(p1: P1, p2: P2) -> With<P1, P2>
where
    Input: Stream,
    P1: Parser<Input>,
    P2: Parser<Input>,
{
    let discarded = ignore(p1);
    With((discarded, p2))
}
// Parser returned by `skip`/`Parser::skip`: runs both parsers in sequence
// and keeps only the first parser's output.
#[derive(Copy, Clone)]
pub struct Skip<P1, P2>((P1, Ignore<P2>));
impl<Input, P1, P2> Parser<Input> for Skip<P1, P2>
where
    Input: Stream,
    P1: Parser<Input>,
    P2: Parser<Input>,
{
    type Output = P1::Output;
    // Delegates state handling to the inner `(P1, Ignore<P2>)` tuple parser.
    type PartialState = <(P1, Ignore<P2>) as Parser<Input>>::PartialState;
    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        self.0.parse_mode(mode, input, state).map(|(a, _)| a)
    }
    forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
}
/// Equivalent to [`p1.skip(p2)`].
///
/// [`p1.skip(p2)`]: ../trait.Parser.html#method.skip
pub fn skip<Input, P1, P2>(p1: P1, p2: P2) -> Skip<P1, P2>
where
    Input: Stream,
    P1: Parser<Input>,
    P2: Parser<Input>,
{
    Skip((p1, ignore(p2)))
}
// `between` is generated via the `parser!` macro so it gets a named,
// nameable parser type (`Between`) while reusing the tuple parser + `map`.
parser! {
    #[derive(Copy, Clone)]
    pub struct Between;
    type PartialState = <Map<(L, P, R), fn ((L::Output, P::Output, R::Output)) -> P::Output> as Parser<Input>>::PartialState;
    /// Parses `open` followed by `parser` followed by `close`.
    /// Returns the value of `parser`.
    ///
    /// ```
    /// # extern crate combine;
    /// # use combine::*;
    /// # use combine::parser::char::string;
    /// # fn main() {
    /// let result = between(token('['), token(']'), string("rust"))
    ///     .parse("[rust]")
    ///     .map(|x| x.0);
    /// assert_eq!(result, Ok("rust"));
    /// # }
    /// ```
    pub fn between[Input, L, R, P](open: L, close: R, parser: P)(Input) -> P::Output
    where [
        Input: Stream,
        L: Parser< Input>,
        R: Parser< Input>,
        P: Parser< Input>,
    ]
    {
        // A named fn (rather than a closure) so the `fn` pointer type in
        // `PartialState` above matches.
        fn middle<T, U, V>((_, x, _): (T, U, V)) -> U {
            x
        }
        (open, parser, close).map(middle)
    }
}
// Parser returned by `then`/`Parser::then`: runs `P`, feeds its output (by
// value) into `F` to construct the next parser `N`, then runs `N`.
#[derive(Copy, Clone)]
pub struct Then<P, F>(P, F);
impl<Input, P, N, F> Parser<Input> for Then<P, F>
where
    Input: Stream,
    F: FnMut(P::Output) -> N,
    P: Parser<Input>,
    N: Parser<Input>,
{
    type Output = N::Output;
    // The `Option<(bool, N)>` caches the constructed second parser (and
    // whether the first parser committed) across partial-parse resumptions.
    type PartialState = (P::PartialState, Option<(bool, N)>, N::PartialState);
    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mut mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        let (ref mut p_state, ref mut n_parser_cache, ref mut n_state) = *state;
        // Run the first parser unless a resumed partial parse already did.
        if mode.is_first() || n_parser_cache.is_none() {
            debug_assert!(n_parser_cache.is_none());
            let (value, committed) = match self.0.parse_mode(mode, input, p_state) {
                PeekOk(value) => (value, false),
                CommitOk(value) => (value, true),
                PeekErr(err) => return PeekErr(err),
                CommitErr(err) => return CommitErr(err),
            };
            *n_parser_cache = Some((committed, (self.1)(value)));
            mode.set_first();
        }
        let result = n_parser_cache
            .as_mut()
            .unwrap()
            .1
            .parse_committed_mode(mode, input, n_state);
        // Combine the second parser's result with whether the first parser
        // committed: a commit anywhere makes the whole result committed.
        match result {
            PeekOk(x) => {
                let (committed, _) = *n_parser_cache.as_ref().unwrap();
                *n_parser_cache = None;
                if committed {
                    CommitOk(x)
                } else {
                    PeekOk(x)
                }
            }
            CommitOk(x) => {
                *n_parser_cache = None;
                CommitOk(x)
            }
            PeekErr(x) => {
                let (committed, _) = *n_parser_cache.as_ref().unwrap();
                *n_parser_cache = None;
                if committed {
                    CommitErr(x.error)
                } else {
                    PeekErr(x)
                }
            }
            // Keep the cache on CommitErr? No — but note the cache is only
            // cleared on the other arms; a committed error aborts the parse.
            CommitErr(x) => CommitErr(x),
        }
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Only the first parser's expectations are known statically.
        self.0.add_error(errors);
    }
}
/// Free-function form of [`p.then(f)`]: monadic bind — the output of `p`
/// chooses the parser that runs next.
///
/// [`p.then(f)`]: ../trait.Parser.html#method.then
pub fn then<Input, P, F, N>(p: P, f: F) -> Then<P, F>
where
    Input: Stream,
    F: FnMut(P::Output) -> N,
    P: Parser<Input>,
    N: Parser<Input>,
{
    Then(p, f)
}
// Parser returned by `then_partial`: like `Then`, but `F` receives a mutable
// *reference* to `P`'s output, so the output itself (not the constructed
// parser) is what gets cached across partial-parse resumptions.
#[derive(Copy, Clone)]
pub struct ThenPartial<P, F>(P, F);
impl<Input, P, N, F> Parser<Input> for ThenPartial<P, F>
where
    Input: Stream,
    F: FnMut(&mut P::Output) -> N,
    P: Parser<Input>,
    N: Parser<Input>,
{
    type Output = N::Output;
    // Caches (committed-flag, P's output); `F` is re-invoked on resumption.
    type PartialState = (P::PartialState, Option<(bool, P::Output)>, N::PartialState);
    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mut mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        let (ref mut p_state, ref mut n_parser_cache, ref mut n_state) = *state;
        if mode.is_first() || n_parser_cache.is_none() {
            debug_assert!(n_parser_cache.is_none());
            match self.0.parse_mode(mode, input, p_state) {
                PeekOk(value) => {
                    *n_parser_cache = Some((false, value));
                }
                CommitOk(value) => {
                    *n_parser_cache = Some((true, value));
                }
                PeekErr(err) => return PeekErr(err),
                CommitErr(err) => return CommitErr(err),
            }
            mode.set_first();
        }
        // Construct the second parser fresh from the cached output each time.
        let result = (self.1)(&mut n_parser_cache.as_mut().unwrap().1)
            .parse_committed_mode(mode, input, n_state);
        match result {
            PeekOk(x) => {
                let (committed, _) = n_parser_cache.take().unwrap();
                if committed {
                    CommitOk(x)
                } else {
                    PeekOk(x)
                }
            }
            CommitOk(x) => {
                *n_parser_cache = None;
                CommitOk(x)
            }
            PeekErr(x) => {
                let (committed, _) = n_parser_cache.take().unwrap();
                if committed {
                    CommitErr(x.error)
                } else {
                    PeekErr(x)
                }
            }
            CommitErr(x) => CommitErr(x),
        }
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        self.0.add_error(errors);
    }
}
/// Free-function form of [`p.then_partial(f)`]: like [`then`] but `f` borrows
/// the first parser's output mutably instead of consuming it.
///
/// [`p.then_partial(f)`]: ../trait.Parser.html#method.then_partial
pub fn then_partial<Input, P, F, N>(p: P, f: F) -> ThenPartial<P, F>
where
    Input: Stream,
    F: FnMut(&mut P::Output) -> N,
    P: Parser<Input>,
    N: Parser<Input>,
{
    ThenPartial(p, f)
}
#[cfg(all(feature = "std", test))]
mod tests {
    use crate::parser::{token::any, EasyParser};
    // Regression check: the 1-tuple `(parser,)` must also implement `Parser`.
    #[test]
    fn sequence_single_parser() {
        assert!((any(),).easy_parse("a").is_ok());
    }
}
/// Parser returned by [`then_ref`]: runs `P`, builds a second parser from a
/// shared reference to `P`'s output via `F`, runs it, and yields *both*
/// outputs as a tuple.
#[derive(Copy, Clone)]
pub struct ThenRef<P, F>(P, F);
impl<Input, P, N, F> Parser<Input> for ThenRef<P, F>
where
    Input: Stream,
    F: FnMut(&P::Output) -> N,
    P: Parser<Input>,
    N: Parser<Input>,
{
    type Output = (P::Output, N::Output);
    // Caches (committed-flag, P's output, the constructed parser) across
    // partial-parse resumptions.
    type PartialState = (
        P::PartialState,
        Option<(bool, P::Output, N)>,
        N::PartialState,
    );
    parse_mode!(Input);
    #[inline]
    fn parse_mode_impl<M>(
        &mut self,
        mut mode: M,
        input: &mut Input,
        state: &mut Self::PartialState,
    ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
    where
        M: ParseMode,
    {
        let (ref mut p_state, ref mut n_parser_cache, ref mut n_state) = *state;
        // Run the first parser unless a resumed partial parse already did.
        if mode.is_first() || n_parser_cache.is_none() {
            debug_assert!(n_parser_cache.is_none());
            let (value, committed) = match self.0.parse_mode(mode, input, p_state) {
                PeekOk(value) => (value, false),
                CommitOk(value) => (value, true),
                PeekErr(err) => return PeekErr(err),
                CommitErr(err) => return CommitErr(err),
            };
            let parser = (self.1)(&value);
            *n_parser_cache = Some((committed, value, parser));
            mode.set_first();
        }
        let result = n_parser_cache
            .as_mut()
            .unwrap()
            .2
            .parse_committed_mode(mode, input, n_state);
        // `take()` leaves `None` behind, so no extra reset of the cache is
        // needed in any arm. A commit anywhere makes the whole result
        // committed.
        match result {
            PeekOk(x) => {
                let (committed, in_value, _) = n_parser_cache.take().unwrap();
                if committed {
                    CommitOk((in_value, x))
                } else {
                    PeekOk((in_value, x))
                }
            }
            CommitOk(x) => {
                let (_, in_value, _) = n_parser_cache.take().unwrap();
                CommitOk((in_value, x))
            }
            PeekErr(x) => {
                let (committed, _, _) = n_parser_cache.take().unwrap();
                if committed {
                    CommitErr(x.error)
                } else {
                    PeekErr(x)
                }
            }
            CommitErr(x) => CommitErr(x),
        }
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Only the first parser's expectations are known statically.
        self.0.add_error(errors);
    }
}
/// Equivalent to [`p.then_ref(f)`].
///
/// [`p.then_ref(f)`]: ../trait.Parser.html#method.then_ref
pub fn then_ref<Input, P, F, N>(p: P, f: F) -> ThenRef<P, F>
where
    Input: Stream,
    F: FnMut(&P::Output) -> N,
    P: Parser<Input>,
    N: Parser<Input>,
{
    ThenRef(p, f)
}

700
vendor/combine/src/parser/token.rs vendored Normal file
View File

@@ -0,0 +1,700 @@
//! Parsers working with single stream items.
use crate::{
error::{
self, ErrorInfo, ParseError,
ParseResult::{self, *},
ResultExt, StreamError, Tracked,
},
lib::marker::PhantomData,
stream::{uncons, Stream, StreamOnce},
Parser,
};
// Parser returned by `any`: consumes and yields exactly one token of any
// kind, failing only at end of input.
#[derive(Copy, Clone)]
pub struct Any<Input>(PhantomData<fn(Input) -> Input>);
impl<Input> Parser<Input> for Any<Input>
where
    Input: Stream,
{
    type Output = Input::Token;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
        uncons(input)
    }
}
/// Parses any token.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// let mut char_parser = any();
/// assert_eq!(char_parser.parse("!").map(|x| x.0), Ok('!'));
/// assert!(char_parser.parse("").is_err());
/// let mut byte_parser = any();
/// assert_eq!(byte_parser.parse(&b"!"[..]).map(|x| x.0), Ok(b'!'));
/// assert!(byte_parser.parse(&b""[..]).is_err());
/// # }
/// ```
pub fn any<Input>() -> Any<Input>
where
Input: Stream,
{
Any(PhantomData)
}
// Parser type returned by [`satisfy`]; wraps the user-supplied boolean
// predicate applied to each candidate token.
#[derive(Copy, Clone)]
pub struct Satisfy<Input, P> {
    predicate: P,
    _marker: PhantomData<Input>,
}
// Shared driver for the `satisfy*` family: pull one token, run `predicate`
// on it, and translate the outcome into the commit/peek result variants.
fn satisfy_impl<Input, P, R>(input: &mut Input, mut predicate: P) -> ParseResult<R, Input::Error>
where
    Input: Stream,
    P: FnMut(Input::Token) -> Option<R>,
{
    // Remember where we started so a rejected token reports the right spot.
    let position = input.position();
    let token = match uncons(input) {
        PeekOk(tok) | CommitOk(tok) => tok,
        PeekErr(err) => return PeekErr(err),
        CommitErr(err) => return CommitErr(err),
    };
    // A token has been taken from the stream, so an accepted value always
    // commits; a rejection yields an empty peek error at the saved position.
    match predicate(token) {
        Some(value) => CommitOk(value),
        None => PeekErr(Input::Error::empty(position).into()),
    }
}
impl<Input, P> Parser<Input> for Satisfy<Input, P>
where
    Input: Stream,
    P: FnMut(Input::Token) -> bool,
{
    type Output = Input::Token;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Self::Output, Input::Error> {
        // Adapt the boolean predicate into the `Option`-returning shape that
        // `satisfy_impl` expects: keep the token on `true`, drop it on `false`.
        let predicate = &mut self.predicate;
        satisfy_impl(input, |token| {
            let accepted = predicate(token.clone());
            if accepted {
                Some(token)
            } else {
                None
            }
        })
    }
}
/// Parses a token and succeeds depending on the result of `predicate`.
///
/// If `predicate` rejects the token the parser fails with an empty error at
/// the token's position (see `satisfy_impl`).
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// let mut parser = satisfy(|c| c == '!' || c == '?');
/// assert_eq!(parser.parse("!").map(|x| x.0), Ok('!'));
/// assert_eq!(parser.parse("?").map(|x| x.0), Ok('?'));
/// # }
/// ```
pub fn satisfy<Input, P>(predicate: P) -> Satisfy<Input, P>
where
    Input: Stream,
    P: FnMut(Input::Token) -> bool,
{
    Satisfy {
        predicate,
        _marker: PhantomData,
    }
}
// Parser type returned by [`satisfy_map`]; like `Satisfy` but the predicate
// both tests and transforms the token (`Option<R>` instead of `bool`).
#[derive(Copy, Clone)]
pub struct SatisfyMap<Input, P> {
    predicate: P,
    _marker: PhantomData<Input>,
}
impl<Input, P, R> Parser<Input> for SatisfyMap<Input, P>
where
    Input: Stream,
    P: FnMut(Input::Token) -> Option<R>,
{
    type Output = R;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Self::Output, Input::Error> {
        // The predicate already has the exact shape `satisfy_impl` needs.
        satisfy_impl(input, &mut self.predicate)
    }
}
/// Parses a token and passes it to `predicate`. If `predicate` returns `Some` the parser succeeds
/// and returns the value inside the `Option`. If `predicate` returns `None` the parser fails
/// without consuming any input.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// #[derive(Debug, PartialEq)]
/// enum YesNo {
///     Yes,
///     No,
/// }
/// let mut parser = satisfy_map(|c| {
///     match c {
///         'Y' => Some(YesNo::Yes),
///         'N' => Some(YesNo::No),
///         _ => None,
///     }
/// });
/// assert_eq!(parser.parse("Y").map(|x| x.0), Ok(YesNo::Yes));
/// assert!(parser.parse("A").map(|x| x.0).is_err());
/// # }
/// ```
pub fn satisfy_map<Input, P, R>(predicate: P) -> SatisfyMap<Input, P>
where
    Input: Stream,
    P: FnMut(Input::Token) -> Option<R>,
{
    SatisfyMap {
        predicate,
        _marker: PhantomData,
    }
}
// Parser type returned by [`token`]; matches one specific token by equality.
#[derive(Copy, Clone)]
pub struct Token<Input>
where
    Input: Stream,
    Input::Token: PartialEq,
{
    // The expected token.
    c: Input::Token,
    _marker: PhantomData<Input>,
}
impl<Input> Parser<Input> for Token<Input>
where
    Input: Stream,
    Input::Token: PartialEq + Clone,
{
    type Output = Input::Token;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
        // Accept the incoming token only if it equals the stored one.
        satisfy_impl(input, |c| if c == self.c { Some(c) } else { None })
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // Report the stored token as the expected input on failure.
        errors.error.add_expected(error::Token(self.c.clone()));
    }
}
/// Parses a character and succeeds if the character is equal to `c`.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// let result = token('!')
///     .parse("!")
///     .map(|x| x.0);
/// assert_eq!(result, Ok('!'));
/// # }
/// ```
pub fn token<Input>(c: Input::Token) -> Token<Input>
where
    Input: Stream,
    Input::Token: PartialEq,
{
    Token {
        c,
        _marker: PhantomData,
    }
}
// Parser type returned by [`tokens`]; matches a sequence of expected tokens
// with a caller-supplied comparison and carries `expected` info for errors.
#[derive(Clone)]
pub struct Tokens<C, E, T, Input>
where
    Input: Stream,
{
    cmp: C,
    expected: E,
    tokens: T,
    _marker: PhantomData<Input>,
}
impl<Input, C, E, T> Parser<Input> for Tokens<C, E, T, Input>
where
    C: FnMut(T::Item, Input::Token) -> bool,
    E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
    T: Clone + IntoIterator,
    Input: Stream,
{
    type Output = T;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, Input::Error> {
        // All errors are reported at the position where the sequence began.
        let start = input.position();
        // Tracks whether any token has been consumed yet: it decides between
        // the Peek* (nothing consumed) and Commit* (input consumed) variants.
        let mut committed = false;
        for c in self.tokens.clone() {
            match crate::stream::uncons(input) {
                CommitOk(other) | PeekOk(other) => {
                    if !(self.cmp)(c, other.clone()) {
                        return if committed {
                            // Mid-sequence mismatch: build a full error with
                            // the unexpected token plus the expected info.
                            let mut errors = <Input as StreamOnce>::Error::from_error(
                                start,
                                StreamError::unexpected_token(other),
                            );
                            errors.add_expected(&self.expected);
                            CommitErr(errors)
                        } else {
                            // First token mismatched: cheap empty error; the
                            // expected info is supplied by `add_error`.
                            PeekErr(<Input as StreamOnce>::Error::empty(start).into())
                        };
                    }
                    committed = true;
                }
                PeekErr(mut error) => {
                    error.error.set_position(start);
                    return if committed {
                        CommitErr(error.error)
                    } else {
                        PeekErr(error)
                    };
                }
                CommitErr(mut error) => {
                    error.set_position(start);
                    return CommitErr(error);
                }
            }
        }
        // Whole sequence matched; echo the expected tokens back as output.
        if committed {
            CommitOk(self.tokens.clone())
        } else {
            PeekOk(self.tokens.clone())
        }
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        errors.error.add_expected(&self.expected);
    }
}
/// Parses multiple tokens.
///
/// Consumes items from the input and compares them to the values from `tokens` using the
/// comparison function `cmp`. Succeeds if all the items from `tokens` are matched in the input
/// stream and fails otherwise with `expected` used as part of the error.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::error;
/// # fn main() {
/// let result = tokens(|l, r| l.eq_ignore_ascii_case(&r), "abc", "abc".chars())
///     .parse("AbC")
///     .map(|x| x.0.as_str());
/// assert_eq!(result, Ok("abc"));
/// let result = tokens(
///     |&l, r| (if l < r { r - l } else { l - r }) <= 2,
///     error::Range(&b"025"[..]),
///     &b"025"[..]
/// )
///     .parse(&b"123"[..])
///     .map(|x| x.0);
/// assert_eq!(result, Ok(&b"025"[..]));
/// # }
/// ```
pub fn tokens<C, E, T, Input>(cmp: C, expected: E, tokens: T) -> Tokens<C, E, T, Input>
where
    C: FnMut(T::Item, Input::Token) -> bool,
    T: Clone + IntoIterator,
    Input: Stream,
{
    Tokens {
        cmp,
        expected,
        tokens,
        _marker: PhantomData,
    }
}
// Parser type returned by [`tokens_cmp`]; like `Tokens` but without an
// `expected` payload, so failures report only the unexpected token.
#[derive(Clone)]
pub struct TokensCmp<C, T, Input>
where
    Input: Stream,
{
    cmp: C,
    tokens: T,
    _marker: PhantomData<Input>,
}
impl<Input, C, T> Parser<Input> for TokensCmp<C, T, Input>
where
    C: FnMut(T::Item, Input::Token) -> bool,
    T: Clone + IntoIterator,
    Input: Stream,
{
    type Output = T;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, Input::Error> {
        // Mirrors `Tokens::parse_lazy` minus the `expected` bookkeeping;
        // all errors are positioned at the start of the sequence.
        let start = input.position();
        let mut committed = false;
        for c in self.tokens.clone() {
            match crate::stream::uncons(input) {
                CommitOk(other) | PeekOk(other) => {
                    if !(self.cmp)(c, other.clone()) {
                        return if committed {
                            let errors = <Input as StreamOnce>::Error::from_error(
                                start,
                                StreamError::unexpected_token(other),
                            );
                            CommitErr(errors)
                        } else {
                            PeekErr(<Input as StreamOnce>::Error::empty(start).into())
                        };
                    }
                    committed = true;
                }
                PeekErr(mut error) => {
                    error.error.set_position(start);
                    return if committed {
                        CommitErr(error.error)
                    } else {
                        PeekErr(error)
                    };
                }
                CommitErr(mut error) => {
                    error.set_position(start);
                    return CommitErr(error);
                }
            }
        }
        // Whole sequence matched; echo the expected tokens back as output.
        if committed {
            CommitOk(self.tokens.clone())
        } else {
            PeekOk(self.tokens.clone())
        }
    }
}
/// Parses multiple tokens.
///
/// Consumes items from the input and compares them to the values from `tokens` using the
/// comparison function `cmp`. Succeeds if all the items from `tokens` are matched in the input
/// stream and fails otherwise.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// # #[allow(deprecated)]
/// # use std::ascii::AsciiExt;
/// let result = tokens_cmp("abc".chars(), |l, r| l.eq_ignore_ascii_case(&r))
///     .parse("AbC")
///     .map(|x| x.0.as_str());
/// assert_eq!(result, Ok("abc"));
/// let result = tokens_cmp(
///     &b"025"[..],
///     |&l, r| (if l < r { r - l } else { l - r }) <= 2,
/// )
///     .parse(&b"123"[..])
///     .map(|x| x.0);
/// assert_eq!(result, Ok(&b"025"[..]));
/// # }
/// ```
pub fn tokens_cmp<C, T, I>(tokens: T, cmp: C) -> TokensCmp<C, T, I>
where
    C: FnMut(T::Item, I::Token) -> bool,
    T: Clone + IntoIterator,
    I: Stream,
{
    TokensCmp {
        cmp,
        tokens,
        _marker: PhantomData,
    }
}
// Parser type returned by [`position`]; yields the stream position without
// consuming any input.
#[derive(Copy, Clone)]
pub struct Position<Input>
where
    Input: Stream,
{
    _marker: PhantomData<Input>,
}
impl<Input> Parser<Input> for Position<Input>
where
    Input: Stream,
{
    type Output = Input::Position;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Position, Input::Error> {
        // Always succeeds without consuming input, hence `PeekOk`.
        PeekOk(input.position())
    }
}
/// Parser which just returns the current position in the stream.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::stream::position::{self, SourcePosition};
/// # fn main() {
/// let result = (position(), token('!'), position())
///     .parse(position::Stream::new("!"))
///     .map(|x| x.0);
/// assert_eq!(result, Ok((SourcePosition { line: 1, column: 1 },
///                        '!',
///                        SourcePosition { line: 1, column: 2 })));
/// # }
/// ```
pub fn position<Input>() -> Position<Input>
where
    Input: Stream,
{
    Position {
        _marker: PhantomData,
    }
}
// Parser type returned by [`one_of`]; accepts a token that equals any
// element of `tokens`.
#[derive(Copy, Clone)]
pub struct OneOf<T, Input>
where
    Input: Stream,
{
    tokens: T,
    _marker: PhantomData<Input>,
}
impl<Input, T> Parser<Input> for OneOf<T, Input>
where
    T: Clone + IntoIterator<Item = Input::Token>,
    Input: Stream,
    Input::Token: PartialEq,
{
    type Output = Input::Token;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
        // Delegate to `satisfy`, scanning the candidate set per token.
        satisfy(|c| self.tokens.clone().into_iter().any(|t| t == c)).parse_lazy(input)
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        // List every candidate token as an expected alternative.
        for expected in self.tokens.clone() {
            errors.error.add_expected(error::Token(expected));
        }
    }
}
/// Extract one token and succeeds if it is part of `tokens`.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// let result = many(one_of("abc".chars()))
///     .parse("abd");
/// assert_eq!(result, Ok((String::from("ab"), "d")));
/// # }
/// ```
pub fn one_of<T, Input>(tokens: T) -> OneOf<T, Input>
where
    T: Clone + IntoIterator,
    Input: Stream,
    Input::Token: PartialEq<T::Item>,
{
    OneOf {
        tokens,
        _marker: PhantomData,
    }
}
// Parser type returned by [`none_of`]; accepts a token that differs from
// every element of `tokens`. Unlike `OneOf` it adds no expected-token info.
#[derive(Copy, Clone)]
pub struct NoneOf<T, Input>
where
    Input: Stream,
{
    tokens: T,
    _marker: PhantomData<Input>,
}
impl<Input, T> Parser<Input> for NoneOf<T, Input>
where
    T: Clone + IntoIterator<Item = Input::Token>,
    Input: Stream,
    Input::Token: PartialEq,
{
    type Output = Input::Token;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
        // Delegate to `satisfy`: succeed only if no excluded token matches.
        satisfy(|c| self.tokens.clone().into_iter().all(|t| t != c)).parse_lazy(input)
    }
}
/// Extract one token and succeeds if it is not part of `tokens`.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::stream::easy;
/// # use combine::stream::position;
/// # fn main() {
/// let mut parser = many1(none_of(b"abc".iter().cloned()));
/// let result = parser.easy_parse(position::Stream::new(&b"xyb"[..]))
///     .map(|(output, input)| (output, input.input));
/// assert_eq!(result, Ok((b"xy"[..].to_owned(), &b"b"[..])));
///
/// let result = parser.easy_parse(position::Stream::new(&b"ab"[..]));
/// assert_eq!(result, Err(easy::Errors {
///     position: 0,
///     errors: vec![
///         easy::Error::Unexpected(easy::Info::Token(b'a')),
///     ]
/// }));
/// # }
/// ```
pub fn none_of<T, Input>(tokens: T) -> NoneOf<T, Input>
where
    T: Clone + IntoIterator,
    Input: Stream,
    Input::Token: PartialEq<T::Item>,
{
    NoneOf {
        tokens,
        _marker: PhantomData,
    }
}
// Parser type returned by [`value`]; stores the value to be produced.
#[derive(Copy, Clone)]
pub struct Value<Input, T>(T, PhantomData<fn(Input) -> Input>);
impl<Input, T> Parser<Input> for Value<Input, T>
where
    Input: Stream,
    T: Clone,
{
    type Output = T;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, _: &mut Input) -> ParseResult<T, Input::Error> {
        // Never touches the stream: hand back a clone of the stored value
        // without consuming anything, so the result is always a peek success.
        let Value(stored, _) = self;
        PeekOk(stored.clone())
    }
}
/// Always returns the value `v` without consuming any input.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # fn main() {
/// let result = value(42)
///     .parse("hello world")
///     .map(|x| x.0);
/// assert_eq!(result, Ok(42));
/// # }
/// ```
pub fn value<Input, T>(v: T) -> Value<Input, T>
where
    Input: Stream,
    T: Clone,
{
    Value(v, PhantomData)
}
// Parser type returned by [`produce`]; stores the factory closure.
#[derive(Copy, Clone)]
pub struct Produce<Input, F>(F, PhantomData<fn(Input) -> Input>);
impl<Input, F, R> Parser<Input> for Produce<Input, F>
where
    Input: Stream,
    F: FnMut() -> R,
{
    type Output = R;
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, _: &mut Input) -> ParseResult<R, Input::Error> {
        // Never touches the stream: invoke the factory and report a peek
        // success since no input was consumed.
        let Produce(factory, _) = self;
        PeekOk(factory())
    }
}
/// Always returns the value produced by calling `f`.
///
/// Can be used when `value` is unable to be used for lack of `Clone` implementation on the value.
///
/// ```
/// # use combine::*;
/// #[derive(Debug, PartialEq)]
/// struct NoClone;
/// let result = produce(|| vec![NoClone])
///     .parse("hello world")
///     .map(|x| x.0);
/// assert_eq!(result, Ok(vec![NoClone]));
/// ```
pub fn produce<Input, F, R>(f: F) -> Produce<Input, F>
where
    Input: Stream,
    F: FnMut() -> R,
{
    Produce(f, PhantomData)
}
// Parser type returned by [`eof`]; succeeds only at end of input.
#[derive(Copy, Clone)]
pub struct Eof<Input>(PhantomData<Input>);
impl<Input> Parser<Input> for Eof<Input>
where
    Input: Stream,
{
    type Output = ();
    type PartialState = ();
    #[inline]
    fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<(), Input::Error> {
        // Probe the stream by attempting to read one item, then rewind.
        let before = input.checkpoint();
        match input.uncons() {
            // Only an "unexpected end of input" error means we really are at
            // the end, which is success for this parser.
            Err(ref err) if err.is_unexpected_end_of_input() => PeekOk(()),
            _ => {
                // Any other outcome (a token, or a different error): undo the
                // probe and fail without consuming. `ctry!` propagates a
                // failed reset as a committed error.
                ctry!(input.reset(before).committed());
                PeekErr(<Input as StreamOnce>::Error::empty(input.position()).into())
            }
        }
    }
    fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
        errors.error.add_expected("end of input");
    }
}
/// Succeeds only if the stream is at end of input, fails otherwise.
///
/// ```
/// # extern crate combine;
/// # use combine::*;
/// # use combine::stream::easy;
/// # use combine::stream::position::{self, SourcePosition};
/// # fn main() {
/// let mut parser = eof();
/// assert_eq!(parser.easy_parse(position::Stream::new("")), Ok(((), position::Stream::new(""))));
/// assert_eq!(parser.easy_parse(position::Stream::new("x")), Err(easy::Errors {
///     position: SourcePosition::default(),
///     errors: vec![
///         easy::Error::Unexpected('x'.into()),
///         easy::Error::Expected("end of input".into())
///     ]
/// }));
/// # }
/// ```
pub fn eof<Input>() -> Eof<Input>
where
    Input: Stream,
{
    Eof(PhantomData)
}

942
vendor/combine/src/stream/buf_reader.rs vendored Normal file
View File

@@ -0,0 +1,942 @@
use std::io::{self, BufRead, Read};
#[cfg(any(
feature = "futures-03",
feature = "tokio-02",
feature = "tokio-03",
feature = "tokio"
))]
use std::pin::Pin;
#[cfg(any(feature = "futures-03", feature = "tokio-02", feature = "tokio-03"))]
use std::mem::MaybeUninit;
#[cfg(feature = "futures-core-03")]
use std::task::{Context, Poll};
#[cfg(feature = "futures-03")]
use std::future::Future;
use bytes::{Buf, BufMut, BytesMut};
#[cfg(feature = "pin-project-lite")]
use pin_project_lite::pin_project;
#[cfg(feature = "tokio-03")]
use tokio_03_dep::io::AsyncBufRead as _;
#[cfg(feature = "tokio")]
use tokio_dep::io::AsyncBufRead as _;
#[cfg(feature = "futures-core-03")]
use futures_core_03::ready;
#[cfg(feature = "pin-project-lite")]
pin_project! {
    /// `BufReader` used by `Decoder` when it is constructed with [`Decoder::new_bufferless`][]
    ///
    /// [`Decoder::new_bufferless`]: ../decoder/struct.Decoder.html#method.new_bufferless
    #[derive(Debug)]
    pub struct BufReader<R> {
        // Underlying reader; pinned so async read impls can project to it.
        #[pin]
        inner: R,
        // Internal buffer of bytes read from `inner` but not yet consumed.
        buf: BytesMut
    }
}
// Non-pin variant of the struct above, used when `pin-project-lite` is
// disabled (only the sync `Read`/`BufRead` impls apply then).
#[cfg(not(feature = "pin-project-lite"))]
/// `BufReader` used by `Decoder` when it is constructed with [`Decoder::new_bufferless`][]
///
/// [`Decoder::new_bufferless`]: ../decoder/struct.Decoder.html#method.new_bufferless
#[derive(Debug)]
pub struct BufReader<R> {
    inner: R,
    buf: BytesMut,
}
impl<R> BufReader<R> {
    /// Creates a new `BufReader` with a default buffer capacity. The default is currently 8 KB,
    /// but may change in the future.
    pub fn new(inner: R) -> Self {
        // 8096 bytes (~8 KB); keep in sync with the doc comment above.
        Self::with_capacity(8096, inner)
    }
    /// Creates a new `BufReader` with the specified buffer capacity.
    pub fn with_capacity(capacity: usize, inner: R) -> Self {
        let buf = BytesMut::with_capacity(capacity);
        Self { inner, buf }
    }
    /// Gets a reference to the underlying reader.
    ///
    /// It is inadvisable to directly read from the underlying reader.
    pub fn get_ref(&self) -> &R {
        &self.inner
    }
    /// Gets a mutable reference to the underlying reader.
    ///
    /// It is inadvisable to directly read from the underlying reader.
    pub fn get_mut(&mut self) -> &mut R {
        &mut self.inner
    }
    #[cfg(feature = "pin-project-lite")]
    /// Gets a pinned mutable reference to the underlying reader.
    ///
    /// It is inadvisable to directly read from the underlying reader.
    pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut R> {
        self.project().inner
    }
    /// Consumes this `BufReader`, returning the underlying reader.
    ///
    /// Note that any leftover data in the internal buffer is lost.
    pub fn into_inner(self) -> R {
        self.inner
    }
    /// Returns a reference to the internally buffered data.
    ///
    /// Unlike `fill_buf`, this will not attempt to fill the buffer if it is empty.
    pub fn buffer(&self) -> &[u8] {
        &self.buf
    }
    /// Invalidates all data in the internal buffer.
    #[inline]
    #[cfg(any(feature = "tokio-02", feature = "tokio-03", feature = "tokio"))]
    fn discard_buffer(self: Pin<&mut Self>) {
        let me = self.project();
        me.buf.clear();
    }
}
// Private sealing trait so the `Combine*` traits below cannot be implemented
// outside this crate.
mod sealed {
    pub trait Sealed {}
}
// Abstraction over where the decode buffer lives: either inside the decoder
// (`Buffer`) or inside a `BufReader` (`Bufferless`).
#[doc(hidden)]
pub trait CombineBuffer<R>: sealed::Sealed {
    // Returns the currently buffered, not-yet-consumed bytes.
    fn buffer<'a>(&'a self, read: &'a R) -> &'a [u8];
    // Marks `len` buffered bytes as consumed.
    fn advance(&mut self, read: &mut R, len: usize);
    // Same as `advance` but for a pinned reader.
    #[cfg(feature = "pin-project-lite")]
    fn advance_pin(&mut self, read: Pin<&mut R>, len: usize);
}
// Blocking refill of the buffer from a `std::io::Read` source.
#[doc(hidden)]
pub trait CombineSyncRead<R>: CombineBuffer<R> {
    fn extend_buf_sync(&mut self, read: &mut R) -> io::Result<usize>;
}
// Async refill for the tokio family of `AsyncRead` traits; the `T` parameter
// distinguishes which tokio version's trait is being adapted.
#[cfg(any(feature = "tokio-02", feature = "tokio-03", feature = "tokio"))]
#[doc(hidden)]
pub trait CombineRead<R, T: ?Sized>: CombineBuffer<R> {
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut R>,
    ) -> Poll<io::Result<usize>>;
}
// Async refill for `futures` 0.3 `AsyncRead`, including a future-returning
// convenience wrapper.
#[cfg(feature = "futures-03")]
#[doc(hidden)]
pub trait CombineAsyncRead<R>: CombineBuffer<R> {
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut R>,
    ) -> Poll<io::Result<usize>>;
    // Returns a future that drives `poll_extend_buf` to completion.
    fn extend_buf<'a>(&'a mut self, read: Pin<&'a mut R>) -> ExtendBuf<'a, Self, R>
    where
        Self: Sized;
}
#[cfg(feature = "futures-03")]
pin_project_lite::pin_project! {
    // Future returned by `CombineAsyncRead::extend_buf`; repeatedly polls the
    // buffer's `poll_extend_buf` until the read completes.
    #[doc(hidden)]
    pub struct ExtendBuf<'a, C, R> {
        buffer: &'a mut C,
        read: Pin<&'a mut R>
    }
}
#[cfg(feature = "futures-03")]
impl<'a, C, R> Future for ExtendBuf<'a, C, R>
where
    C: CombineAsyncRead<R>,
{
    type Output = io::Result<usize>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Simply delegate each poll to the buffer strategy.
        let me = self.project();
        me.buffer.poll_extend_buf(cx, me.read.as_mut())
    }
}
/// Marker used by `Decoder` for an internal buffer
#[derive(Default)]
pub struct Buffer(pub(crate) BytesMut);
impl sealed::Sealed for Buffer {}
impl<R> CombineBuffer<R> for Buffer {
    // The bytes live in `self.0`; the reader is not consulted.
    fn buffer<'a>(&'a self, _read: &'a R) -> &'a [u8] {
        &self.0
    }
    fn advance(&mut self, _read: &mut R, len: usize) {
        self.0.advance(len);
    }
    #[cfg(feature = "pin-project-lite")]
    fn advance_pin(&mut self, _read: Pin<&mut R>, len: usize) {
        self.0.advance(len);
    }
}
impl<R> CombineSyncRead<R> for Buffer
where
    R: Read,
{
    fn extend_buf_sync(&mut self, read: &mut R) -> io::Result<usize> {
        // Delegate to the shared blocking refill helper below.
        extend_buf_sync(&mut self.0, read)
    }
}
#[cfg(feature = "futures-03")]
impl<R> CombineAsyncRead<R> for Buffer
where
    R: futures_io_03::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut R>,
    ) -> Poll<io::Result<usize>> {
        poll_extend_buf(&mut self.0, cx, read)
    }
    fn extend_buf<'a>(&'a mut self, read: Pin<&'a mut R>) -> ExtendBuf<'a, Self, R> {
        // Make sure there is spare capacity before constructing the future.
        if !self.0.has_remaining_mut() {
            self.0.reserve(8 * 1024);
        }
        // Copy of tokio's read_buf method (but it has to force initialize the buffer)
        // Zero-fill the uninitialized tail so it can be safely exposed as
        // `&mut [u8]` when the future polls the reader.
        let bs = self.0.chunk_mut();
        for i in 0..bs.len() {
            bs.write_byte(i, 0);
        }
        ExtendBuf { buffer: self, read }
    }
}
#[cfg(feature = "tokio-02")]
impl<R> CombineRead<R, dyn tokio_02_dep::io::AsyncRead> for Buffer
where
    R: tokio_02_dep::io::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut R>,
    ) -> Poll<io::Result<usize>> {
        if !self.0.has_remaining_mut() {
            self.0.reserve(8 * 1024);
        }
        // `Bytes05` bridges the crate's `bytes` 0.6+ buffer to the
        // `bytes` 0.5 `BufMut` trait that tokio 0.2 expects.
        read.poll_read_buf(cx, &mut Bytes05(&mut self.0))
    }
}
#[cfg(feature = "tokio-03")]
// Wraps the uninitialized tail of `bs` as a tokio 0.3 `ReadBuf`.
fn tokio_03_to_read_buf(bs: &mut BytesMut) -> tokio_03_dep::io::ReadBuf<'_> {
    let uninit = bs.chunk_mut();
    // SAFETY: `ReadBuf::uninit` accepts uninitialized memory; the pointer and
    // length come straight from `chunk_mut`, which describes the writable
    // tail of the buffer.
    unsafe {
        tokio_03_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
            uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
            uninit.len(),
        ))
    }
}
#[cfg(feature = "tokio-03")]
impl<R> CombineRead<R, dyn tokio_03_dep::io::AsyncRead> for Buffer
where
    R: tokio_03_dep::io::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut R>,
    ) -> Poll<io::Result<usize>> {
        tokio_03_read_buf(cx, read, &mut self.0)
    }
}
#[cfg(feature = "tokio-03")]
// Polls a tokio 0.3 reader into `bs`, growing it when full, and reports the
// number of bytes appended.
fn tokio_03_read_buf(
    cx: &mut Context<'_>,
    read: Pin<&mut impl tokio_03_dep::io::AsyncRead>,
    bs: &mut bytes::BytesMut,
) -> Poll<io::Result<usize>> {
    if !bs.has_remaining_mut() {
        bs.reserve(8 * 1024);
    }
    let mut buf = tokio_03_to_read_buf(bs);
    ready!(read.poll_read(cx, &mut buf))?;
    // SAFETY: `buf.filled()` reports how many bytes the reader initialized,
    // so advancing by exactly that amount only exposes initialized memory.
    unsafe {
        let n = buf.filled().len();
        bs.advance_mut(n);
        Poll::Ready(Ok(n))
    }
}
#[cfg(feature = "tokio")]
impl<R> CombineRead<R, dyn tokio_dep::io::AsyncRead> for Buffer
where
    R: tokio_dep::io::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut R>,
    ) -> Poll<io::Result<usize>> {
        tokio_read_buf(read, cx, &mut self.0)
    }
}
#[cfg(feature = "tokio")]
// Polls a tokio 1.x reader into `bs`, growing it when full; delegates the
// unsafe buffer handling to `tokio_util::io::poll_read_buf`.
fn tokio_read_buf(
    read: Pin<&mut impl tokio_dep::io::AsyncRead>,
    cx: &mut Context<'_>,
    bs: &mut bytes::BytesMut,
) -> Poll<io::Result<usize>> {
    if !bs.has_remaining_mut() {
        bs.reserve(8 * 1024);
    }
    tokio_util::io::poll_read_buf(read, cx, bs)
}
/// Marker used by `Decoder` for an external buffer
#[derive(Default)]
pub struct Bufferless;
impl sealed::Sealed for Bufferless {}
impl<R> CombineBuffer<BufReader<R>> for Bufferless {
    // The bytes live inside the `BufReader` itself; this marker holds none.
    fn buffer<'a>(&'a self, read: &'a BufReader<R>) -> &'a [u8] {
        &read.buf
    }
    fn advance(&mut self, read: &mut BufReader<R>, len: usize) {
        read.buf.advance(len);
    }
    #[cfg(feature = "pin-project-lite")]
    fn advance_pin(&mut self, read: Pin<&mut BufReader<R>>, len: usize) {
        read.project().buf.advance(len);
    }
}
impl<R> CombineSyncRead<BufReader<R>> for Bufferless
where
    R: Read,
{
    fn extend_buf_sync(&mut self, read: &mut BufReader<R>) -> io::Result<usize> {
        extend_buf_sync(&mut read.buf, &mut read.inner)
    }
}
// Blocking refill: reads from `read` into the spare capacity of `buf`,
// returning how many bytes were appended (0 means EOF).
fn extend_buf_sync<R>(buf: &mut BytesMut, read: &mut R) -> io::Result<usize>
where
    R: Read,
{
    let size = 8 * 1024;
    if !buf.has_remaining_mut() {
        buf.reserve(size);
    }
    // Copy of tokio's poll_read_buf method (but it has to force initialize the buffer)
    let n = {
        let bs = buf.chunk_mut();
        // Only zero (and read into) at most `size` bytes per call to bound
        // the initialization work.
        let initial_size = bs.len().min(size);
        let bs = &mut bs[..initial_size];
        for i in 0..bs.len() {
            bs.write_byte(i, 0);
        }
        // Convert to `&mut [u8]`
        // SAFETY: the entire buffer is preinitialized above
        let bs = unsafe { &mut *(bs as *mut _ as *mut [u8]) };
        let n = read.read(bs)?;
        assert!(
            n <= bs.len(),
            "AsyncRead reported that it initialized more than the number of bytes in the buffer"
        );
        n
    };
    // SAFETY: the entire buffer has been preinitialized
    unsafe { buf.advance_mut(n) };
    Ok(n)
}
#[cfg(feature = "tokio-02")]
// Adapter exposing the crate's `BytesMut` through the `bytes` 0.5 `BufMut`
// trait that tokio 0.2's `poll_read_buf` requires.
struct Bytes05<'a>(&'a mut BytesMut);
#[cfg(feature = "tokio-02")]
impl bytes_05::BufMut for Bytes05<'_> {
    fn remaining_mut(&self) -> usize {
        self.0.remaining_mut()
    }
    unsafe fn advance_mut(&mut self, cnt: usize) {
        self.0.advance_mut(cnt)
    }
    fn bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: reinterprets the writable tail as `MaybeUninit<u8>`, which
        // is valid for any byte memory regardless of initialization.
        unsafe { &mut *(self.0.chunk_mut() as *mut _ as *mut [MaybeUninit<u8>]) }
    }
}
#[cfg(feature = "tokio-02")]
impl<R> CombineRead<BufReader<R>, dyn tokio_02_dep::io::AsyncRead> for Bufferless
where
    R: tokio_02_dep::io::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut BufReader<R>>,
    ) -> Poll<io::Result<usize>> {
        // Project to split the pinned reader from its buffer, then read
        // directly into the reader's own buffer.
        let me = read.project();
        if !me.buf.has_remaining_mut() {
            me.buf.reserve(8 * 1024);
        }
        tokio_02_dep::io::AsyncRead::poll_read_buf(me.inner, cx, &mut Bytes05(me.buf))
    }
}
#[cfg(feature = "tokio-03")]
impl<R> CombineRead<BufReader<R>, dyn tokio_03_dep::io::AsyncRead> for Bufferless
where
    R: tokio_03_dep::io::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut BufReader<R>>,
    ) -> Poll<io::Result<usize>> {
        // Read from the inner reader straight into the reader's own buffer.
        let me = read.project();
        tokio_03_read_buf(cx, me.inner, me.buf)
    }
}
#[cfg(feature = "tokio")]
impl<R> CombineRead<BufReader<R>, dyn tokio_dep::io::AsyncRead> for Bufferless
where
    R: tokio_dep::io::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut BufReader<R>>,
    ) -> Poll<io::Result<usize>> {
        // Read from the inner reader straight into the reader's own buffer.
        let me = read.project();
        tokio_read_buf(me.inner, cx, me.buf)
    }
}
#[cfg(feature = "futures-03")]
impl<R> CombineAsyncRead<BufReader<R>> for Bufferless
where
    R: futures_io_03::AsyncRead,
{
    fn poll_extend_buf(
        &mut self,
        cx: &mut Context<'_>,
        read: Pin<&mut BufReader<R>>,
    ) -> Poll<io::Result<usize>> {
        let me = read.project();
        poll_extend_buf(me.buf, cx, me.inner)
    }
    fn extend_buf<'a>(
        &'a mut self,
        mut read: Pin<&'a mut BufReader<R>>,
    ) -> ExtendBuf<'a, Self, BufReader<R>> {
        // Ensure spare capacity before handing the reader to the future.
        let me = read.as_mut().project();
        if !me.buf.has_remaining_mut() {
            me.buf.reserve(8 * 1024);
        }
        // Copy of tokio's read_buf method (but it has to force initialize the buffer)
        // Zero-fill the writable tail so `poll_extend_buf` may expose it as
        // `&mut [u8]`.
        let bs = me.buf.chunk_mut();
        for i in 0..bs.len() {
            bs.write_byte(i, 0);
        }
        ExtendBuf { buffer: self, read }
    }
}
#[cfg(feature = "futures-03")]
// Single poll of a futures 0.3 reader into `buf`; zero-fills the writable
// tail before exposing it to the reader as an initialized slice.
fn poll_extend_buf<R>(
    buf: &mut BytesMut,
    cx: &mut Context<'_>,
    read: Pin<&mut R>,
) -> Poll<io::Result<usize>>
where
    R: futures_io_03::AsyncRead,
{
    // Copy of tokio's read_buf method (but it has to force initialize the buffer)
    let n = {
        let bs = buf.chunk_mut();
        // preinit the buffer
        for i in 0..bs.len() {
            bs.write_byte(i, 0);
        }
        // Convert to `&mut [u8]`
        // SAFETY: preinitialize the buffer
        let bs = unsafe { &mut *(bs as *mut _ as *mut [u8]) };
        let n = ready!(read.poll_read(cx, bs))?;
        assert!(
            n <= bs.len(),
            "AsyncRead reported that it initialized more than the number of bytes in the buffer"
        );
        n
    };
    // SAFETY: the buffer was preinitialized
    unsafe { buf.advance_mut(n) };
    Poll::Ready(Ok(n))
}
#[cfg(feature = "tokio-02")]
impl<R: tokio_02_dep::io::AsyncRead> tokio_02_dep::io::AsyncRead for BufReader<R> {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        use tokio_02_dep::io::AsyncBufRead;
        // If we don't have any buffered data and we're doing a massive read
        // (larger than our internal buffer), bypass our internal buffer
        // entirely.
        // NOTE(review): the "no buffered data" test uses `!has_remaining_mut()`
        // (capacity exhausted) rather than `is_empty()`; it is used this way
        // consistently across this file — confirm against upstream intent.
        if !self.buf.has_remaining_mut() && buf.len() >= self.buf.len() {
            let res = ready!(self.as_mut().get_pin_mut().poll_read(cx, buf));
            self.discard_buffer();
            return Poll::Ready(res);
        }
        // Otherwise serve from the internal buffer, refilling it if needed.
        let mut rem = ready!(self.as_mut().poll_fill_buf(cx))?;
        let nread = rem.read(buf)?;
        self.consume(nread);
        Poll::Ready(Ok(nread))
    }
    // we can't skip unconditionally because of the large buffer case in read.
    unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [MaybeUninit<u8>]) -> bool {
        self.inner.prepare_uninitialized_buffer(buf)
    }
}
#[cfg(feature = "tokio-02")]
impl<R: tokio_02_dep::io::AsyncRead> tokio_02_dep::io::AsyncBufRead for BufReader<R> {
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
        let me = self.project();
        // If we've reached the end of our internal buffer then we need to fetch
        // some more data from the underlying reader.
        if me.buf.is_empty() {
            ready!(me.inner.poll_read_buf(cx, &mut Bytes05(me.buf)))?;
        }
        Poll::Ready(Ok(&me.buf[..]))
    }
    fn consume(self: Pin<&mut Self>, amt: usize) {
        let me = self.project();
        me.buf.advance(amt);
    }
}
// Write-side pass-through so a read/write stream stays usable for writing
// after being wrapped in `BufReader`; nothing is buffered on the write path.
#[cfg(feature = "tokio-02")]
impl<R: tokio_02_dep::io::AsyncRead + tokio_02_dep::io::AsyncWrite> tokio_02_dep::io::AsyncWrite
    for BufReader<R>
{
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        self.get_pin_mut().poll_write(cx, buf)
    }
    fn poll_write_buf<B: bytes_05::Buf>(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut B,
    ) -> Poll<io::Result<usize>> {
        self.get_pin_mut().poll_write_buf(cx, buf)
    }
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        self.get_pin_mut().poll_flush(cx)
    }
    fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        self.get_pin_mut().poll_shutdown(cx)
    }
}
#[cfg(feature = "tokio-03")]
impl<R: tokio_03_dep::io::AsyncRead> tokio_03_dep::io::AsyncRead for BufReader<R> {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut tokio_03_dep::io::ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        // If we don't have any buffered data and we're doing a massive read
        // (larger than our internal buffer), bypass our internal buffer
        // entirely.
        if !self.buf.has_remaining_mut() && buf.remaining() >= self.buf.len() {
            let res = ready!(self.as_mut().get_pin_mut().poll_read(cx, buf));
            self.discard_buffer();
            return Poll::Ready(res);
        }
        // Otherwise serve from the internal buffer, refilling it if needed.
        let rem = ready!(self.as_mut().poll_fill_buf(cx))?;
        let amt = std::cmp::min(rem.len(), buf.remaining());
        buf.put_slice(&rem[..amt]);
        self.consume(amt);
        Poll::Ready(Ok(()))
    }
}
#[cfg(feature = "tokio-03")]
impl<R: tokio_03_dep::io::AsyncRead> tokio_03_dep::io::AsyncBufRead for BufReader<R> {
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
        let me = self.project();
        // If we've reached the end of our internal buffer then we need to fetch
        // some more data from the underlying reader.
        if me.buf.is_empty() {
            ready!(tokio_03_read_buf(cx, me.inner, me.buf))?;
        }
        Poll::Ready(Ok(&me.buf[..]))
    }
    fn consume(self: Pin<&mut Self>, amt: usize) {
        let me = self.project();
        me.buf.advance(amt);
    }
}
// Write-side pass-through; nothing is buffered on the write path.
#[cfg(feature = "tokio-03")]
impl<R: tokio_03_dep::io::AsyncRead + tokio_03_dep::io::AsyncWrite> tokio_03_dep::io::AsyncWrite
    for BufReader<R>
{
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        self.get_pin_mut().poll_write(cx, buf)
    }
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        self.get_pin_mut().poll_flush(cx)
    }
    fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        self.get_pin_mut().poll_shutdown(cx)
    }
}
#[cfg(feature = "tokio")]
impl<R: tokio_dep::io::AsyncRead> tokio_dep::io::AsyncRead for BufReader<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut tokio_dep::io::ReadBuf<'_>,
) -> Poll<io::Result<()>> {
// If we don't have any buffered data and we're doing a massive read
// (larger than our internal buffer), bypass our internal buffer
// entirely.
if !self.buf.has_remaining_mut() && buf.remaining() >= self.buf.len() {
let res = ready!(self.as_mut().get_pin_mut().poll_read(cx, buf));
self.discard_buffer();
return Poll::Ready(res);
}
let rem = ready!(self.as_mut().poll_fill_buf(cx))?;
let amt = std::cmp::min(rem.len(), buf.remaining());
buf.put_slice(&rem[..amt]);
self.consume(amt);
Poll::Ready(Ok(()))
}
}
#[cfg(feature = "tokio")]
impl<R: tokio_dep::io::AsyncRead> tokio_dep::io::AsyncBufRead for BufReader<R> {
    /// Returns the currently buffered bytes, pulling more data from the
    /// wrapped reader (via `tokio_read_buf`) only when the buffer is empty.
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
        let me = self.project();

        // If we've reached the end of our internal buffer then we need to fetch
        // some more data from the underlying reader.
        if me.buf.is_empty() {
            ready!(tokio_read_buf(me.inner, cx, me.buf))?;
        }
        Poll::Ready(Ok(&me.buf[..]))
    }

    /// Marks `amt` buffered bytes as consumed.
    fn consume(self: Pin<&mut Self>, amt: usize) {
        let me = self.project();
        me.buf.advance(amt);
    }
}
#[cfg(feature = "tokio")]
impl<R: tokio_dep::io::AsyncRead + tokio_dep::io::AsyncWrite> tokio_dep::io::AsyncWrite
    for BufReader<R>
{
    // Writes are not buffered: every call is forwarded straight to the
    // wrapped I/O object through the pin projection (`get_pin_mut`).
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        self.get_pin_mut().poll_write(cx, buf)
    }

    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        self.get_pin_mut().poll_flush(cx)
    }

    fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        self.get_pin_mut().poll_shutdown(cx)
    }
}
impl<R: Read> Read for BufReader<R> {
    /// Reads into `buf`, serving from the internal buffer when it has data.
    ///
    /// Mirrors the async implementations above: a large read that cannot use
    /// the (empty) internal buffer goes straight to the underlying reader.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // If we don't have any buffered data and we're doing a massive read
        // (larger than our internal buffer), bypass our internal buffer
        // entirely.
        if !self.buf.has_remaining_mut() && buf.len() >= self.buf.len() {
            // BUG FIX: this previously called `self.read(buf)`, which
            // re-enters this very method (the guard condition still holds on
            // re-entry) and recurses until the stack overflows. Read from the
            // wrapped reader instead, as the async impls do via the `inner`
            // field.
            let res = self.inner.read(buf);
            self.buf.clear();
            return res;
        }

        // Fill the internal buffer if needed and copy out of it.
        let nread = {
            let mut rem = self.fill_buf()?;
            rem.read(buf)?
        };
        self.consume(nread);
        Ok(nread)
    }
}
impl<R: Read> BufRead for BufReader<R> {
    /// Returns the buffered bytes, reading more from the underlying reader
    /// (via `Bufferless::extend_buf_sync`) only when the buffer is empty.
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        // If we've reached the end of our internal buffer then we need to fetch
        // some more data from the underlying reader.
        // NOTE(review): the following remark about `>=` appears to be copied
        // from std's BufReader; this implementation branches on `is_empty()`
        // instead, so it does not apply here.
        // Branch using `>=` instead of the more correct `==`
        // to tell the compiler that the pos..cap slice is always valid.
        if self.buf.is_empty() {
            Bufferless.extend_buf_sync(self)?;
        }
        Ok(&self.buf[..])
    }

    /// Marks `amt` buffered bytes as consumed.
    fn consume(&mut self, amt: usize) {
        self.buf.advance(amt);
    }
}
// Tests for the tokio-02 integration of `BufReader`.
#[cfg(test)]
#[cfg(feature = "tokio-02")]
mod tests {
    use super::{BufReader, Bufferless, CombineRead};

    use std::{io, pin::Pin};

    use {
        bytes_05::BytesMut,
        tokio_02_dep::{
            self as tokio,
            io::{AsyncRead, AsyncReadExt},
        },
    };

    impl<R: AsyncRead> BufReader<R> {
        // Test helper: drive one `Bufferless::poll_extend_buf` call to
        // completion, returning how many bytes were appended to the buffer.
        async fn extend_buf_tokio_02(mut self: Pin<&mut Self>) -> io::Result<usize> {
            crate::future_ext::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut())).await
        }
    }

    // Sequential fixed-size reads through the buffered reader.
    #[tokio::test]
    async fn buf_reader() {
        let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
        let mut buf = [0u8; 3];
        read.read(&mut buf).await.unwrap();

        assert_eq!(buf, [1, 2, 3]);

        let mut buf = [0u8; 3];
        read.read(&mut buf).await.unwrap();

        assert_eq!(buf, [4, 5, 6]);

        let mut buf = [0u8; 3];
        read.read(&mut buf).await.unwrap();

        assert_eq!(buf, [7, 8, 9]);

        let mut buf = [1u8; 3];
        read.read(&mut buf).await.unwrap();

        // Only one byte (0) remained; the rest of `buf` keeps its old value.
        assert_eq!(buf, [0, 1, 1]);
    }

    // `read_buf` appends into a growable `BytesMut` target.
    #[tokio::test]
    async fn buf_reader_buf() {
        let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);

        let mut buf = BytesMut::with_capacity(3);
        read.read_buf(&mut buf).await.unwrap();

        assert_eq!(&buf[..], [1, 2, 3]);

        read.read_buf(&mut buf).await.unwrap();

        assert_eq!(&buf[..], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
    }

    // Extending the internal buffer directly: first fill reads the configured
    // capacity (3), the second drains the remaining 7 bytes.
    #[tokio::test]
    async fn buf_reader_extend_buf() {
        let read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
        futures_03_dep::pin_mut!(read);

        assert_eq!(read.as_mut().extend_buf_tokio_02().await.unwrap(), 3);
        assert_eq!(read.buffer(), [1, 2, 3]);

        assert_eq!(read.as_mut().extend_buf_tokio_02().await.unwrap(), 7);
        assert_eq!(read.buffer(), [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
    }
}
// Tests for the tokio 1.x integration of `BufReader`; mirrors the tokio-02
// test module above.
#[cfg(test)]
#[cfg(feature = "tokio")]
mod tests_tokio_1 {
    use super::{BufReader, Bufferless, CombineRead};

    use std::{io, pin::Pin};

    use {
        bytes::BytesMut,
        tokio_dep::{
            self as tokio,
            io::{AsyncRead, AsyncReadExt},
        },
    };

    impl<R: AsyncRead> BufReader<R> {
        // Test helper: drive one `Bufferless::poll_extend_buf` call to
        // completion, returning how many bytes were appended to the buffer.
        async fn extend_buf_tokio(mut self: Pin<&mut Self>) -> io::Result<usize> {
            crate::future_ext::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut())).await
        }
    }

    // Sequential fixed-size reads through the buffered reader.
    #[tokio::test]
    async fn buf_reader() {
        let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
        let mut buf = [0u8; 3];
        read.read(&mut buf).await.unwrap();

        assert_eq!(buf, [1, 2, 3]);

        let mut buf = [0u8; 3];
        read.read(&mut buf).await.unwrap();

        assert_eq!(buf, [4, 5, 6]);

        let mut buf = [0u8; 3];
        read.read(&mut buf).await.unwrap();

        assert_eq!(buf, [7, 8, 9]);

        let mut buf = [1u8; 3];
        read.read(&mut buf).await.unwrap();

        // Only one byte (0) remained; the rest of `buf` keeps its old value.
        assert_eq!(buf, [0, 1, 1]);
    }

    // `read_buf` appends into a growable `BytesMut` target.
    #[tokio::test]
    async fn buf_reader_buf() {
        let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);

        let mut buf = BytesMut::with_capacity(3);
        read.read_buf(&mut buf).await.unwrap();

        assert_eq!(&buf[..], [1, 2, 3]);

        read.read_buf(&mut buf).await.unwrap();

        assert_eq!(&buf[..], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
    }

    // Extending the internal buffer directly: first fill reads the configured
    // capacity (3), the second drains the remaining 7 bytes.
    #[tokio::test]
    async fn buf_reader_extend_buf() {
        let read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
        futures_03_dep::pin_mut!(read);

        assert_eq!(read.as_mut().extend_buf_tokio().await.unwrap(), 3);
        assert_eq!(read.buffer(), [1, 2, 3]);

        assert_eq!(read.as_mut().extend_buf_tokio().await.unwrap(), 7);
        assert_eq!(read.buffer(), [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
    }
}
// Tests for the synchronous (`std::io::Read`) integration of `BufReader`.
#[cfg(test)]
mod tests_sync {
    use super::{BufReader, Bufferless, CombineSyncRead};

    use std::io::Read;

    // Sequential fixed-size reads through the buffered reader.
    #[test]
    #[allow(clippy::unused_io_amount)]
    fn buf_reader() {
        let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
        let mut buf = [0u8; 3];
        read.read(&mut buf).unwrap();

        assert_eq!(buf, [1, 2, 3]);

        let mut buf = [0u8; 3];
        read.read(&mut buf).unwrap();

        assert_eq!(buf, [4, 5, 6]);

        let mut buf = [0u8; 3];
        read.read(&mut buf).unwrap();

        assert_eq!(buf, [7, 8, 9]);

        let mut buf = [1u8; 3];
        read.read(&mut buf).unwrap();

        // Only one byte (0) remained; the rest of `buf` keeps its old value.
        assert_eq!(buf, [0, 1, 1]);
    }

    // Extending the internal buffer directly: first fill reads the configured
    // capacity (3), the second drains the remaining 7 bytes.
    #[test]
    fn buf_reader_extend_buf() {
        let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);

        assert_eq!(Bufferless.extend_buf_sync(&mut read).unwrap(), 3);
        assert_eq!(read.buffer(), [1, 2, 3]);

        assert_eq!(Bufferless.extend_buf_sync(&mut read).unwrap(), 7);
        assert_eq!(read.buffer(), [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
    }
}

141
vendor/combine/src/stream/buffered.rs vendored Normal file
View File

@@ -0,0 +1,141 @@
use alloc::collections::VecDeque;
use crate::{
error::StreamError,
stream::{ParseError, Positioned, ResetStream, StreamErrorFor, StreamOnce},
};
/// `Stream` which buffers items from an instance of `StreamOnce` into a ring buffer.
/// Instances of `StreamOnce` which is not able to implement `ResetStream` (such as `ReadStream`) may
/// use this as a way to implement `ResetStream` and become a full `Stream` instance.
///
/// The drawback is that the buffer only stores a limited number of items which limits how many
/// tokens that can be reset and replayed. If a `buffered::Stream` is reset past this limit an error
/// will be returned when `uncons` is next called.
///
/// NOTE: If this stream is used in conjunction with an error enhancing stream such as
/// `easy::Stream` (also via the `easy_parser` method) it is recommended that the `buffered::Stream`
/// instance wraps the `easy::Stream` instance instead of the other way around.
///
/// ```ignore
/// // DO
/// buffered::Stream::new(easy::Stream(..), ..)
/// // DON'T
/// easy::Stream(buffered::Stream::new(.., ..))
/// parser.easy_parse(buffered::Stream::new(..));
/// ```
#[derive(Debug, PartialEq)]
pub struct Stream<Input>
where
    Input: StreamOnce + Positioned,
{
    // Logical read position: number of tokens this wrapper has handed out,
    // possibly rewound by `reset`.
    offset: usize,
    // The underlying stream tokens are pulled from.
    iter: Input,
    // Total number of tokens ever pulled from `iter`; `offset` trails this
    // after a reset.
    buffer_offset: usize,
    // Ring buffer of the most recently pulled (token, position) pairs,
    // available for replay after a reset.
    buffer: VecDeque<(Input::Token, Input::Position)>,
}
impl<Input> ResetStream for Stream<Input>
where
    Input: Positioned,
{
    /// Checkpoints are simply the logical offset into the stream.
    type Checkpoint = usize;

    fn checkpoint(&self) -> Self::Checkpoint {
        self.offset
    }

    /// Rewinds to `checkpoint`.
    ///
    /// Fails when the checkpoint refers to a token that has already been
    /// evicted from the ring buffer (i.e. it is older than the last
    /// `buffer.len()` tokens pulled from the underlying stream).
    fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
        if checkpoint < self.buffer_offset - self.buffer.len() {
            // We have backtracked too far
            // (typo fix: the message previously read "Backtracked to far")
            Err(Self::Error::from_error(
                self.position(),
                StreamErrorFor::<Self>::message_static_message("Backtracked too far"),
            ))
        } else {
            self.offset = checkpoint;
            Ok(())
        }
    }
}
impl<Input> Stream<Input>
where
    Input: StreamOnce + Positioned,
    Input::Position: Clone,
    Input::Token: Clone,
{
    /// Creates a `buffered::Stream` wrapping `iter` which is able to replay
    /// up to `lookahead` previously seen tokens after a `reset`.
    pub fn new(iter: Input, lookahead: usize) -> Stream<Input> {
        let buffer = VecDeque::with_capacity(lookahead);
        Stream {
            iter,
            buffer,
            offset: 0,
            buffer_offset: 0,
        }
    }
}
impl<Input> Positioned for Stream<Input>
where
    Input: StreamOnce + Positioned,
{
    /// Returns the position of the next token that `uncons` would yield.
    #[inline]
    fn position(&self) -> Self::Position {
        if self.offset >= self.buffer_offset {
            // At the front of the stream: the underlying stream knows best.
            self.iter.position()
        } else if self.offset < self.buffer_offset - self.buffer.len() {
            // Backtracked beyond the ring buffer: report the oldest position
            // still retained (best effort; `uncons` will error here anyway).
            self.buffer
                .front()
                .expect("At least 1 element in the buffer")
                .1
                .clone()
        } else {
            // Replaying: index the buffered (token, position) pair that
            // corresponds to the current offset.
            self.buffer[self.buffer.len() - (self.buffer_offset - self.offset)]
                .1
                .clone()
        }
    }
}
impl<Input> StreamOnce for Stream<Input>
where
    Input: StreamOnce + Positioned,
    Input::Token: Clone,
{
    type Token = Input::Token;
    type Range = Input::Range;
    type Position = Input::Position;
    type Error = Input::Error;

    /// Yields the next token, either freshly pulled from the underlying
    /// stream (and remembered for later replay) or replayed from the ring
    /// buffer after a `reset`.
    #[inline]
    fn uncons(&mut self) -> Result<Input::Token, StreamErrorFor<Self>> {
        if self.offset >= self.buffer_offset {
            // At the front of the stream: pull a fresh token and buffer it.
            let position = self.iter.position();
            let token = self.iter.uncons()?;
            self.buffer_offset += 1;
            // We want the VecDeque to only keep the last .capacity() elements so we need to remove
            // an element if it gets too large
            // (typo fix: comment previously read "to large")
            if self.buffer.len() == self.buffer.capacity() {
                self.buffer.pop_front();
            }
            self.buffer.push_back((token.clone(), position));
            self.offset += 1;
            Ok(token)
        } else if self.offset < self.buffer_offset - self.buffer.len() {
            // We have backtracked too far; the requested token was evicted
            // from the ring buffer.
            // (typo fix: the message previously read "Backtracked to far")
            Err(StreamError::message_static_message("Backtracked too far"))
        } else {
            // Replaying a buffered token.
            let value = self.buffer[self.buffer.len() - (self.buffer_offset - self.offset)]
                .0
                .clone();
            self.offset += 1;
            Ok(value)
        }
    }

    fn is_partial(&self) -> bool {
        self.iter.is_partial()
    }
}

227
vendor/combine/src/stream/decoder.rs vendored Normal file
View File

@@ -0,0 +1,227 @@
use crate::{
error::ParseError,
stream::buf_reader::{Buffer, Bufferless, CombineBuffer},
};
use std::{
fmt,
io::{self, Read},
};
#[cfg(feature = "pin-project-lite")]
use std::pin::Pin;
/// Error returned by the decoding macros: either the parser itself failed, or
/// reading more input from the underlying reader failed.
#[derive(Debug)]
pub enum Error<E, P> {
    /// The parser reported an error.
    Parse(E),
    /// An I/O error occurred while refilling the buffer; `position` is where
    /// decoding had progressed to when it happened.
    Io { position: P, error: io::Error },
}
// Allows a decoder `Error` wrapping `easy::Errors` to be flattened back into
// `easy::Errors`, turning an I/O failure into an `easy::Error::Other` at the
// position it occurred.
impl<'a, P> From<Error<crate::easy::Errors<u8, &'a [u8], P>, P>>
    for crate::easy::Errors<u8, &'a [u8], P>
where
    P: Ord + Clone,
{
    fn from(e: Error<crate::easy::Errors<u8, &'a [u8], P>, P>) -> Self {
        match e {
            Error::Parse(e) => e,
            Error::Io { position, error } => {
                crate::easy::Errors::from_error(position, crate::easy::Error::Other(error.into()))
            }
        }
    }
}
// Marker impl: `Error` is a standard error type whenever its parse-error
// payload is one; all methods use the trait's defaults.
impl<E, P> std::error::Error for Error<E, P>
where
    E: std::error::Error,
    P: fmt::Display + fmt::Debug,
{
}
impl<E: fmt::Display, P: fmt::Display> fmt::Display for Error<E, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::Parse(e) => e.fmt(f),
Error::Io { position: _, error } => error.fmt(f),
}
}
}
#[derive(Default)]
/// Used together with the `decode!` macro
pub struct Decoder<S, P, C = Buffer> {
    // Position decoding has progressed to, reported on I/O errors.
    position: P,
    // Persistent parser state carried across partial parses.
    state: S,
    // Either an owned `Buffer` or `Bufferless` (buffering delegated to a
    // `BufReader` wrapping the reader).
    buffer: C,
    // Set once a read returns 0 bytes, signalling end of input to the parser.
    end_of_input: bool,
}
impl<S, P> Decoder<S, P, Buffer>
where
    P: Default,
    S: Default,
{
    /// Constructs a new [`Decoder`] with an internal buffer. Allows any `AsyncRead/Read` instance to
    /// be used when decoding but there may be data left in the internal buffer after decoding
    /// (accessible with [`Decoder::buffer`])
    pub fn new() -> Self {
        Decoder::default()
    }

    /// Constructs a new [`Decoder`] with an internal buffer. Allows any `AsyncRead/Read` instance to
    /// be used when decoding but there may be data left in the internal buffer after decoding
    /// (accessible with [`Decoder::buffer`])
    ///
    /// Alias of [`Decoder::new`] for symmetry with `new_bufferless`.
    pub fn new_buffer() -> Self {
        Decoder::new()
    }
}
impl<S, P> Decoder<S, P, Bufferless>
where
    P: Default,
    S: Default,
{
    /// Constructs a new `Decoder` without an internal buffer. Requires the read instance to be
    /// wrapped with combine's [`BufReader`] instance to
    ///
    /// [`BufReader`]: super::buf_reader::BufReader
    pub fn new_bufferless() -> Self {
        Decoder::default()
    }
}
impl<S, P> Decoder<S, P> {
    /// Returns the bytes remaining in the internal buffer after decoding.
    pub fn buffer(&self) -> &[u8] {
        &self.buffer.0
    }
}
impl<S, P, C> Decoder<S, P, C> {
    /// Discards `removed` already-parsed bytes from the buffer (macro plumbing).
    #[doc(hidden)]
    pub fn advance<R>(&mut self, read: &mut R, removed: usize)
    where
        C: CombineBuffer<R>,
    {
        // Remove the data we have parsed and adjust `removed` to be the amount of data we
        // committed from `self.reader`
        self.buffer.advance(read, removed)
    }

    /// Pinned variant of [`Decoder::advance`] for async readers (macro plumbing).
    #[doc(hidden)]
    #[cfg(feature = "pin-project-lite")]
    pub fn advance_pin<R>(&mut self, read: Pin<&mut R>, removed: usize)
    where
        C: CombineBuffer<R>,
    {
        // Remove the data we have parsed and adjust `removed` to be the amount of data we
        // committed from `self.reader`
        self.buffer.advance_pin(read, removed);
    }

    /// The position decoding has progressed to.
    pub fn position(&self) -> &P {
        &self.position
    }

    /// Exposes the decoder internals to the `decode!` macro.
    #[doc(hidden)]
    pub fn __inner(&mut self) -> (&mut S, &mut P, &C, bool) {
        (
            &mut self.state,
            &mut self.position,
            &self.buffer,
            self.end_of_input,
        )
    }
}
// NOTE: the original carried a vestigial empty bound (`where C: ,`) which
// adds nothing; it has been removed.
impl<S, P, C> Decoder<S, P, C> {
    /// Synchronously pulls more input into the buffer before a parse attempt
    /// (macro plumbing). Sets `end_of_input` when the reader is exhausted.
    #[doc(hidden)]
    pub fn __before_parse<R>(&mut self, mut reader: R) -> io::Result<()>
    where
        R: Read,
        C: crate::stream::buf_reader::CombineSyncRead<R>,
    {
        if self.buffer.extend_buf_sync(&mut reader)? == 0 {
            self.end_of_input = true;
        }

        Ok(())
    }
}
#[cfg(feature = "tokio-02")]
impl<S, P, C> Decoder<S, P, C> {
    /// Pulls more input from a tokio-02 `AsyncRead` before a parse attempt
    /// (macro plumbing). Sets `end_of_input` when the reader is exhausted.
    #[doc(hidden)]
    pub async fn __before_parse_tokio_02<R>(&mut self, mut reader: Pin<&mut R>) -> io::Result<()>
    where
        R: tokio_02_dep::io::AsyncRead,
        C: crate::stream::buf_reader::CombineRead<R, dyn tokio_02_dep::io::AsyncRead>,
    {
        let copied =
            crate::future_ext::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
                .await?;
        if copied == 0 {
            self.end_of_input = true;
        }

        Ok(())
    }
}
#[cfg(feature = "tokio-03")]
impl<S, P, C> Decoder<S, P, C> {
    /// Pulls more input from a tokio-03 `AsyncRead` before a parse attempt
    /// (macro plumbing). Sets `end_of_input` when the reader is exhausted.
    #[doc(hidden)]
    pub async fn __before_parse_tokio_03<R>(&mut self, mut reader: Pin<&mut R>) -> io::Result<()>
    where
        R: tokio_03_dep::io::AsyncRead,
        C: crate::stream::buf_reader::CombineRead<R, dyn tokio_03_dep::io::AsyncRead>,
    {
        let copied =
            crate::future_ext::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
                .await?;
        if copied == 0 {
            self.end_of_input = true;
        }

        Ok(())
    }
}
#[cfg(feature = "tokio")]
impl<S, P, C> Decoder<S, P, C> {
    /// Pulls more input from a tokio 1.x `AsyncRead` before a parse attempt
    /// (macro plumbing). Sets `end_of_input` when the reader is exhausted.
    #[doc(hidden)]
    pub async fn __before_parse_tokio<R>(&mut self, mut reader: Pin<&mut R>) -> io::Result<()>
    where
        R: tokio_dep::io::AsyncRead,
        C: crate::stream::buf_reader::CombineRead<R, dyn tokio_dep::io::AsyncRead>,
    {
        let copied =
            crate::future_ext::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
                .await?;
        if copied == 0 {
            self.end_of_input = true;
        }

        Ok(())
    }
}
#[cfg(feature = "futures-03")]
impl<S, P, C> Decoder<S, P, C> {
    /// Pulls more input from a futures-io `AsyncRead` before a parse attempt
    /// (macro plumbing). Sets `end_of_input` when the reader is exhausted.
    #[doc(hidden)]
    pub async fn __before_parse_async<R>(&mut self, reader: Pin<&mut R>) -> io::Result<()>
    where
        R: futures_io_03::AsyncRead,
        C: crate::stream::buf_reader::CombineAsyncRead<R>,
    {
        let copied = self.buffer.extend_buf(reader).await?;
        if copied == 0 {
            self.end_of_input = true;
        }

        Ok(())
    }
}

897
vendor/combine/src/stream/easy.rs vendored Normal file
View File

@@ -0,0 +1,897 @@
//! Stream wrapper which provides an informative and easy to use error type.
//!
//! Unless you have specific constraints preventing you from using this error type (such as being
//! a `no_std` environment) you probably want to use this stream type. It can easily be used
//! through the [`EasyParser::easy_parse`] method.
//!
//! The provided `Errors` type is roughly the same as `ParseError` in combine 1.x and 2.x.
//!
//! ```
//! #[macro_use]
//! extern crate combine;
//! use combine::{easy, Parser, EasyParser, Stream, many1};
//! use combine::parser::char::letter;
//! use combine::stream::StreamErrorFor;
//! use combine::error::{ParseError, StreamError};
//!
//! fn main() {
//! parser!{
//! fn parser[Input]()(Input) -> String
//! where [
//! Input: Stream<Token = char, Error = easy::ParseError<Input>>,
//! Input::Range: PartialEq,
//! // If we want to use the error type explicitly we need to help rustc infer
//! // `StreamError` to `easy::Error` (rust-lang/rust#24159)
//! Input::Error: ParseError<
//! Input::Token,
//! Input::Range,
//! Input::Position,
//! StreamError = easy::Error<Input::Token, Input::Range>
//! >
//! ]
//! {
//! many1(letter()).and_then(|word: String| {
//! if word == "combine" {
//! Ok(word)
//! } else {
//! Err(easy::Error::Expected(easy::Info::Static("combine")))
//! }
//! })
//! }
//! }
//!
//! parser!{
//! fn parser2[Input]()(Input) -> String
//! where [
//! Input: Stream<Token = char>,
//! ]
//! {
//! many1(letter()).and_then(|word: String| {
//! if word == "combine" {
//! Ok(word)
//! } else {
//! // Alternatively it is possible to only use the methods provided by the
//! // `StreamError` trait.
//! // In that case the extra bound is not necessary (and this method will work
//! // for other errors than `easy::Errors`)
//! Err(StreamErrorFor::<Input>::expected_static_message("combine"))
//! }
//! })
//! }
//! }
//!
//! let input = "combin";
//! let expected_error = Err(easy::Errors {
//! errors: vec![
//! easy::Error::Expected("combine".into())
//! ],
//! position: 0,
//! });
//! assert_eq!(
//! parser().easy_parse(input).map_err(|err| err.map_position(|p| p.translate_position(input))),
//! expected_error
//! );
//! assert_eq!(
//! parser2().easy_parse(input).map_err(|err| err.map_position(|p| p.translate_position(input))),
//! expected_error
//! );
//! }
//!
//! ```
//!
//! [`EasyParser::easy_parse`]: super::super::parser::EasyParser::easy_parse
use std::{error::Error as StdError, fmt};
use crate::error::{Info as PrimitiveInfo, ParseResult, StreamError, Tracked};
use crate::stream::{
Positioned, RangeStream, RangeStreamOnce, ResetStream, StreamErrorFor, StreamOnce,
};
/// Enum holding error information. Variants are defined for `Stream::Token` and `Stream::Range` as
/// well as string variants holding easy descriptions.
///
/// As there are implementations of `From` for `String` and `&'static str` the
/// constructor need not be used directly as calling `msg.into()` should turn a message into the
/// correct `Info` variant.
#[derive(Clone, Debug)]
pub enum Info<T, R> {
    /// A single token from the stream.
    Token(T),
    /// A range of tokens from the stream.
    Range(R),
    /// A dynamically allocated message.
    Owned(String),
    /// A static message.
    Static(&'static str),
}
// Converts the crate's core `Info` type into the `easy` variant; `Format`
// payloads are eagerly rendered to an owned `String`.
impl<T, R, F> From<PrimitiveInfo<T, R, F>> for Info<T, R>
where
    F: fmt::Display,
{
    fn from(info: PrimitiveInfo<T, R, F>) -> Self {
        match info {
            PrimitiveInfo::Token(b) => Info::Token(b),
            PrimitiveInfo::Range(b) => Info::Range(b),
            PrimitiveInfo::Static(b) => Info::Static(b),
            PrimitiveInfo::Format(b) => Info::Owned(b.to_string()),
        }
    }
}
impl<T, R> Info<T, R> {
    /// Maps the `Token` variant with `f`, leaving all other variants as-is.
    pub fn map_token<F, U>(self, f: F) -> Info<U, R>
    where
        F: FnOnce(T) -> U,
    {
        use self::Info::*;

        match self {
            Token(t) => Token(f(t)),
            Range(r) => Range(r),
            Owned(s) => Owned(s),
            Static(x) => Static(x),
        }
    }

    /// Maps the `Range` variant with `f`, leaving all other variants as-is.
    pub fn map_range<F, S>(self, f: F) -> Info<T, S>
    where
        F: FnOnce(R) -> S,
    {
        use self::Info::*;

        match self {
            Token(t) => Token(t),
            Range(r) => Range(f(r)),
            Owned(s) => Owned(s),
            Static(x) => Static(x),
        }
    }
}
impl<T: PartialEq, R: PartialEq> PartialEq for Info<T, R> {
    fn eq(&self, other: &Info<T, R>) -> bool {
        match (self, other) {
            (&Info::Token(ref l), &Info::Token(ref r)) => l == r,
            (&Info::Range(ref l), &Info::Range(ref r)) => l == r,
            (&Info::Owned(ref l), &Info::Owned(ref r)) => l == r,
            // `Static` and `Owned` messages compare equal when their text
            // matches, so duplicate detection works regardless of how a
            // message was constructed.
            (&Info::Static(l), &Info::Owned(ref r)) => l == r,
            (&Info::Owned(ref l), &Info::Static(r)) => l == r,
            (&Info::Static(l), &Info::Static(r)) => l == r,
            _ => false,
        }
    }
}
impl<T: fmt::Display, R: fmt::Display> fmt::Display for Info<T, R> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            // Tokens and ranges are quoted with backticks; plain messages are
            // written verbatim.
            Info::Token(ref c) => write!(f, "`{}`", c),
            Info::Range(ref c) => write!(f, "`{}`", c),
            Info::Owned(ref s) => write!(f, "{}", s),
            Info::Static(s) => write!(f, "{}", s),
        }
    }
}
// Convenience conversions so plain tokens and messages can be turned into an
// `Info` with `.into()`.

impl<R> From<char> for Info<char, R> {
    fn from(s: char) -> Info<char, R> {
        Info::Token(s)
    }
}

impl<T, R> From<String> for Info<T, R> {
    fn from(s: String) -> Info<T, R> {
        Info::Owned(s)
    }
}

impl<T, R> From<&'static str> for Info<T, R> {
    fn from(s: &'static str) -> Info<T, R> {
        Info::Static(s)
    }
}

impl<R> From<u8> for Info<u8, R> {
    fn from(s: u8) -> Info<u8, R> {
        Info::Token(s)
    }
}
/// Enum used to store information about an error that has occurred during parsing.
#[derive(Debug)]
pub enum Error<T, R> {
    /// Error indicating an unexpected token has been encountered in the stream
    Unexpected(Info<T, R>),
    /// Error indicating that the parser expected something else
    Expected(Info<T, R>),
    /// Generic message
    Message(Info<T, R>),
    /// Variant for containing other types of errors
    Other(Box<dyn StdError + Send + Sync>),
}
/// `StreamError` constructors map one-to-one onto the `Error`/`Info` variant
/// pairs; `into_other` converts to any other `StreamError` implementation.
impl<Item, Range> StreamError<Item, Range> for Error<Item, Range>
where
    Item: PartialEq,
    Range: PartialEq,
{
    #[inline]
    fn unexpected_token(token: Item) -> Self {
        Error::Unexpected(Info::Token(token))
    }
    #[inline]
    fn unexpected_range(token: Range) -> Self {
        Error::Unexpected(Info::Range(token))
    }
    #[inline]
    fn unexpected_format<T>(msg: T) -> Self
    where
        T: fmt::Display,
    {
        Error::Unexpected(Info::Owned(msg.to_string()))
    }
    #[inline]
    fn unexpected_static_message(msg: &'static str) -> Self {
        Error::Unexpected(Info::Static(msg))
    }

    #[inline]
    fn expected_token(token: Item) -> Self {
        Error::Expected(Info::Token(token))
    }
    #[inline]
    fn expected_range(token: Range) -> Self {
        Error::Expected(Info::Range(token))
    }
    #[inline]
    fn expected_format<T>(msg: T) -> Self
    where
        T: fmt::Display,
    {
        Error::Expected(Info::Owned(msg.to_string()))
    }
    #[inline]
    fn expected_static_message(msg: &'static str) -> Self {
        Error::Expected(Info::Static(msg))
    }

    #[inline]
    fn message_format<T>(msg: T) -> Self
    where
        T: fmt::Display,
    {
        Error::Message(Info::Owned(msg.to_string()))
    }
    #[inline]
    fn message_static_message(msg: &'static str) -> Self {
        Error::Message(Info::Static(msg))
    }
    #[inline]
    fn message_token(token: Item) -> Self {
        Error::Message(Info::Token(token))
    }
    #[inline]
    fn message_range(token: Range) -> Self {
        Error::Message(Info::Range(token))
    }

    fn is_unexpected_end_of_input(&self) -> bool {
        *self == Self::end_of_input()
    }

    #[inline]
    fn other<E>(err: E) -> Self
    where
        E: StdError + Send + Sync + 'static,
    {
        err.into()
    }

    #[inline]
    fn into_other<T>(self) -> T
    where
        T: StreamError<Item, Range>,
    {
        match self {
            Error::Unexpected(info) => match info {
                Info::Token(x) => T::unexpected_token(x),
                Info::Range(x) => T::unexpected_range(x),
                Info::Static(x) => T::unexpected_static_message(x),
                Info::Owned(x) => T::unexpected_format(x),
            },
            Error::Expected(info) => match info {
                Info::Token(x) => T::expected_token(x),
                Info::Range(x) => T::expected_range(x),
                Info::Static(x) => T::expected_static_message(x),
                Info::Owned(x) => T::expected_format(x),
            },
            // BUG FIX: these arms previously forwarded to the `expected_*`
            // constructors (an apparent copy-paste of the arm above),
            // silently turning `Message` errors into `Expected` ones during
            // conversion. They now use the matching `message_*` constructors.
            Error::Message(info) => match info {
                Info::Token(x) => T::message_token(x),
                Info::Range(x) => T::message_range(x),
                Info::Static(x) => T::message_static_message(x),
                Info::Owned(x) => T::message_format(x),
            },
            Error::Other(err) => T::message_format(err),
        }
    }
}
// A single `Error` can itself act as a `ParseError` holding exactly one
// cause. Positions are not tracked in this representation: `position()`
// always reports `Position::default()` and `set_position` is a no-op, and
// `add` simply replaces the stored error.
impl<Item, Range, Position> crate::error::ParseError<Item, Range, Position> for Error<Item, Range>
where
    Item: PartialEq,
    Range: PartialEq,
    Position: Default,
{
    type StreamError = Self;

    #[inline]
    fn empty(_: Position) -> Self {
        Self::message_static_message("")
    }

    #[inline]
    fn from_error(_: Position, err: Self::StreamError) -> Self {
        err
    }

    #[inline]
    fn position(&self) -> Position {
        Position::default()
    }

    #[inline]
    fn set_position(&mut self, _position: Position) {}

    #[inline]
    fn add(&mut self, err: Self::StreamError) {
        *self = err;
    }

    #[inline]
    fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
    where
        F: FnOnce(&mut Tracked<Self>),
    {
        f(self_);
        self_.error = info;
    }

    fn is_unexpected_end_of_input(&self) -> bool {
        *self == Self::end_of_input()
    }

    #[inline]
    fn into_other<T>(self) -> T
    where
        T: crate::error::ParseError<Item, Range, Position>,
    {
        T::from_error(Position::default(), StreamError::into_other(self))
    }
}
// Converts an `Errors` value into any other `ParseError` whose token, range
// and position types can be produced with `From`, translating each contained
// error individually.
impl<Item, Range, Position> crate::error::ParseErrorInto<Item, Range, Position>
    for Errors<Item, Range, Position>
{
    fn into_other_error<T, Item2, Range2, Position2>(self) -> T
    where
        T: crate::error::ParseError<Item2, Range2, Position2>,
        Item2: From<Item>,
        Range2: From<Range>,
        Position2: From<Position>,
    {
        let mut error = T::empty(self.position.into());
        for err in self.errors {
            error.add(crate::error::StreamErrorInto::<Item, Range>::into_other_error(err));
        }
        error
    }
}
/// Converts an `Error` into any other `StreamError` whose token and range
/// types can be produced with `From`.
impl<Item, Range> crate::error::StreamErrorInto<Item, Range> for Error<Item, Range> {
    fn into_other_error<T, Item2, Range2>(self) -> T
    where
        T: crate::error::StreamError<Item2, Range2>,
        Item2: From<Item>,
        Range2: From<Range>,
    {
        match self {
            Error::Unexpected(info) => match info {
                Info::Token(x) => T::unexpected_token(x.into()),
                Info::Range(x) => T::unexpected_range(x.into()),
                Info::Static(x) => T::unexpected_static_message(x),
                Info::Owned(x) => T::unexpected_format(x),
            },
            Error::Expected(info) => match info {
                Info::Token(x) => T::expected_token(x.into()),
                Info::Range(x) => T::expected_range(x.into()),
                Info::Static(x) => T::expected_static_message(x),
                Info::Owned(x) => T::expected_format(x),
            },
            // BUG FIX: these arms previously forwarded to the `expected_*`
            // constructors (an apparent copy-paste of the arm above),
            // silently turning `Message` errors into `Expected` ones during
            // conversion. They now use the matching `message_*` constructors.
            Error::Message(info) => match info {
                Info::Token(x) => T::message_token(x.into()),
                Info::Range(x) => T::message_range(x.into()),
                Info::Static(x) => T::message_static_message(x),
                Info::Owned(x) => T::message_format(x),
            },
            Error::Other(err) => T::message_format(err),
        }
    }
}
impl<Item, Range, Position> crate::error::ParseError<Item, Range, Position>
    for Errors<Item, Range, Position>
where
    Item: PartialEq,
    Range: PartialEq,
    Position: Ord + Clone,
{
    type StreamError = Error<Item, Range>;

    #[inline]
    fn empty(pos: Position) -> Self {
        Errors::empty(pos)
    }

    #[inline]
    fn from_error(position: Position, err: Self::StreamError) -> Self {
        Self::new(position, err)
    }

    #[inline]
    fn position(&self) -> Position {
        self.position.clone()
    }

    #[inline]
    fn set_position(&mut self, position: Position) {
        self.position = position;
    }

    #[inline]
    fn merge(self, other: Self) -> Self {
        Errors::merge(self, other)
    }

    #[inline]
    fn add(&mut self, err: Self::StreamError) {
        self.add_error(err);
    }

    /// Runs `f` and then replaces every `Expected` error that `f` added with
    /// the single expectation `info`, keeping errors that existed before the
    /// call untouched.
    #[inline]
    fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
    where
        F: FnOnce(&mut Tracked<Self>),
    {
        // Remember how many errors existed before `f` ran.
        let start = self_.error.errors.len();
        f(self_);
        // Replace all expected errors that were added from the previous add_error
        // with this expected error
        let mut i = 0;
        self_.error.errors.retain(|e| {
            if i < start {
                // Errors from before the call to `f` are always kept.
                i += 1;
                true
            } else {
                // Newly added `Expected` errors are dropped in favor of `info`.
                match *e {
                    Error::Expected(_) => false,
                    _ => true,
                }
            }
        });
        self_.error.add(info);
    }

    fn clear_expected(&mut self) {
        self.errors.retain(|e| match *e {
            Error::Expected(_) => false,
            _ => true,
        })
    }

    fn is_unexpected_end_of_input(&self) -> bool {
        self.errors
            .iter()
            .any(StreamError::is_unexpected_end_of_input)
    }

    /// Converts into another `ParseError`, preserving only the last recorded
    /// error since the target may hold just one.
    #[inline]
    fn into_other<T>(mut self) -> T
    where
        T: crate::error::ParseError<Item, Range, Position>,
    {
        match self.errors.pop() {
            Some(err) => T::from_error(self.position, StreamError::into_other(err)),
            None => T::empty(self.position),
        }
    }
}
impl<T, R> Error<T, R> {
    /// Maps any `Token` payload with `f`, leaving other payloads as-is.
    pub fn map_token<F, U>(self, f: F) -> Error<U, R>
    where
        F: FnOnce(T) -> U,
    {
        use self::Error::*;

        match self {
            Unexpected(x) => Unexpected(x.map_token(f)),
            Expected(x) => Expected(x.map_token(f)),
            Message(x) => Message(x.map_token(f)),
            Other(x) => Other(x),
        }
    }

    /// Maps any `Range` payload with `f`, leaving other payloads as-is.
    pub fn map_range<F, S>(self, f: F) -> Error<T, S>
    where
        F: FnOnce(R) -> S,
    {
        use self::Error::*;

        match self {
            Unexpected(x) => Unexpected(x.map_range(f)),
            Expected(x) => Expected(x.map_range(f)),
            Message(x) => Message(x.map_range(f)),
            Other(x) => Other(x),
        }
    }
}
impl<T: PartialEq, R: PartialEq> PartialEq for Error<T, R> {
    fn eq(&self, other: &Error<T, R>) -> bool {
        match (self, other) {
            (&Error::Unexpected(ref l), &Error::Unexpected(ref r))
            | (&Error::Expected(ref l), &Error::Expected(ref r))
            | (&Error::Message(ref l), &Error::Message(ref r)) => l == r,
            // `Other` errors (and mismatched variants) never compare equal.
            _ => false,
        }
    }
}
// Any standard error can be wrapped into the `Other` variant with `.into()`.
impl<T, R, E> From<E> for Error<T, R>
where
    E: StdError + 'static + Send + Sync,
{
    fn from(e: E) -> Error<T, R> {
        Error::Other(Box::new(e))
    }
}
impl<T, R> Error<T, R> {
    /// Returns the `end_of_input` error.
    pub fn end_of_input() -> Error<T, R> {
        Error::Unexpected("end of input".into())
    }

    /// Formats a slice of errors in a human readable way.
    ///
    /// ```rust
    /// # extern crate combine;
    /// # use combine::*;
    /// # use combine::parser::char::*;
    /// # use combine::stream::position::{self, SourcePosition};
    ///
    /// # fn main() {
    /// let input = r"
    ///   ,123
    /// ";
    /// let result = spaces().silent().with(char('.').or(char('a')).or(digit()))
    ///     .easy_parse(position::Stream::new(input));
    /// let m = format!("{}", result.unwrap_err());
    /// let expected = r"Parse error at line: 2, column: 3
    /// Unexpected `,`
    /// Expected `.`, `a` or digit
    /// ";
    /// assert_eq!(m, expected);
    /// # }
    /// ```
    pub fn fmt_errors(errors: &[Error<T, R>], f: &mut fmt::Formatter<'_>) -> fmt::Result
    where
        T: fmt::Display,
        R: fmt::Display,
    {
        // First print the token that we did not expect
        // There should really just be one unexpected message at this point though we print them
        // all to be safe
        let unexpected = errors.iter().filter(|e| match **e {
            Error::Unexpected(_) => true,
            _ => false,
        });
        for error in unexpected {
            writeln!(f, "{}", error)?;
        }

        // Then we print out all the things that were expected in a comma separated list
        // 'Expected 'a', 'expression' or 'let'
        let iter = || {
            errors.iter().filter_map(|e| match *e {
                Error::Expected(ref err) => Some(err),
                _ => None,
            })
        };
        let expected_count = iter().count();
        for (i, message) in iter().enumerate() {
            // Separator depends on position: "Expected" first, commas in the
            // middle, " or" before the final item.
            let s = match i {
                0 => "Expected",
                _ if i < expected_count - 1 => ",",
                // Last expected message to be written
                _ => " or",
            };
            write!(f, "{} {}", s, message)?;
        }
        if expected_count != 0 {
            writeln!(f)?;
        }

        // If there are any generic messages we print them out last
        let messages = errors.iter().filter(|e| match **e {
            Error::Message(_) | Error::Other(_) => true,
            _ => false,
        });
        for error in messages {
            writeln!(f, "{}", error)?;
        }
        Ok(())
    }
}
/// Convenience alias over `Errors` for `StreamOnce` types which makes it possible to specify the
/// `Errors` type from a `StreamOnce` by writing `ParseError<Input>` instead of `Errors<Input::Token,
/// Input::Range, Input::Position>`.
pub type ParseError<S> =
    Errors<<S as StreamOnce>::Token, <S as StreamOnce>::Range, <S as StreamOnce>::Position>;
/// Struct which holds information about one or more errors that occurred at a specific position.
/// Can hold multiple instances of `Error` if more than one error occurred in the same position.
#[derive(Debug, PartialEq)]
pub struct Errors<T, R, P> {
    /// The position where the error occurred
    pub position: P,
    /// A vector containing specific information on what errors occurred at `position`. Usually
    /// a fully formed message contains one `Unexpected` error and one or more `Expected` errors.
    /// `Message` and `Other` may also appear (`combine` never generates these errors on its own)
    /// and may warrant custom handling.
    pub errors: Vec<Error<T, R>>,
}
impl<T, R, P> Errors<T, R, P> {
    /// Constructs a new `ParseError` which occurred at `position`.
    #[inline]
    pub fn new(position: P, error: Error<T, R>) -> Errors<T, R, P> {
        Self::from_errors(position, vec![error])
    }

    /// Constructs an error with no other information than the position it occurred at.
    #[inline]
    pub fn empty(position: P) -> Errors<T, R, P> {
        Self::from_errors(position, vec![])
    }

    /// Constructs a `ParseError` with multiple causes.
    #[inline]
    pub fn from_errors(position: P, errors: Vec<Error<T, R>>) -> Errors<T, R, P> {
        Errors { position, errors }
    }

    /// Constructs an end of input error. Should be returned by parsers which encounter end of
    /// input unexpectedly.
    #[inline]
    pub fn end_of_input(position: P) -> Errors<T, R, P> {
        Self::new(position, Error::end_of_input())
    }

    /// Adds an error if `error` does not exist in this `ParseError` already (as determined by
    /// `PartialEq`).
    pub fn add_error(&mut self, error: Error<T, R>)
    where
        T: PartialEq,
        R: PartialEq,
    {
        // Don't add duplicate errors
        if self.errors.iter().all(|err| *err != error) {
            self.errors.push(error);
        }
    }

    /// Removes all `Expected` errors in `self` and adds `info` instead.
    pub fn set_expected(&mut self, info: Info<T, R>) {
        // Remove all other expected messages
        self.errors.retain(|e| match *e {
            Error::Expected(_) => false,
            _ => true,
        });
        self.errors.push(Error::Expected(info));
    }

    /// Merges two `ParseError`s. If they exist at the same position the errors of `other` are
    /// added to `self` (using `add_error` to skip duplicates). If they are not at the same
    /// position the error furthest ahead are returned, ignoring the other `ParseError`.
    pub fn merge(mut self, mut other: Errors<T, R, P>) -> Errors<T, R, P>
    where
        P: Ord,
        T: PartialEq,
        R: PartialEq,
    {
        use std::cmp::Ordering;

        // Only keep the errors which occurred after consuming the most amount of data
        match self.position.cmp(&other.position) {
            Ordering::Less => other,
            Ordering::Greater => self,
            Ordering::Equal => {
                for message in other.errors.drain(..) {
                    self.add_error(message);
                }
                self
            }
        }
    }

    /// Maps the position to a new value
    pub fn map_position<F, Q>(self, f: F) -> Errors<T, R, Q>
    where
        F: FnOnce(P) -> Q,
    {
        Errors::from_errors(f(self.position), self.errors)
    }

    /// Maps all token variants to a new value
    pub fn map_token<F, U>(self, mut f: F) -> Errors<U, R, P>
    where
        F: FnMut(T) -> U,
    {
        Errors::from_errors(
            self.position,
            self.errors
                .into_iter()
                .map(|error| error.map_token(&mut f))
                .collect(),
        )
    }

    /// Maps all range variants to a new value.
    ///
    /// ```
    /// use combine::*;
    /// use combine::parser::range::range;
    /// println!(
    ///     "{}",
    ///     range(&"HTTP"[..])
    ///         .easy_parse("HTT")
    ///         .unwrap_err()
    ///         .map_range(|bytes| format!("{:?}", bytes))
    /// );
    /// ```
    pub fn map_range<F, S>(self, mut f: F) -> Errors<T, S, P>
    where
        F: FnMut(R) -> S,
    {
        Errors::from_errors(
            self.position,
            self.errors
                .into_iter()
                .map(|error| error.map_range(&mut f))
                .collect(),
        )
    }
}
impl<T, R, P> StdError for Errors<T, R, P>
where
    P: fmt::Display + fmt::Debug,
    T: fmt::Display + fmt::Debug,
    R: fmt::Display + fmt::Debug,
{
    // `description` is deprecated in std in favor of `Display`, but is kept here
    // for compatibility with consumers on older Rust versions.
    fn description(&self) -> &str {
        "parse error"
    }
}
impl<T, R, P> fmt::Display for Errors<T, R, P>
where
    P: fmt::Display,
    T: fmt::Display,
    R: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // First line reports where the error occurred, subsequent lines the causes.
        writeln!(f, "Parse error at {}", self.position)?;
        Error::fmt_errors(&self.errors, f)
    }
}
impl<T: fmt::Display, R: fmt::Display> fmt::Display for Error<T, R> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `Unexpected`/`Expected` get a prefix; `Message`/`Other` render verbatim.
        match *self {
            Error::Unexpected(ref c) => write!(f, "Unexpected {}", c),
            Error::Expected(ref s) => write!(f, "Expected {}", s),
            Error::Message(ref msg) => msg.fmt(f),
            Error::Other(ref err) => err.fmt(f),
        }
    }
}
/// Newtype wrapper over a stream `S` that switches its error type to the
/// more descriptive `easy` error type.
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub struct Stream<S>(pub S);
impl<S> From<S> for Stream<S> {
    fn from(stream: S) -> Self {
        Stream(stream)
    }
}
// The stream trait impls below all delegate to the wrapped stream `self.0`,
// converting any error produced by the inner stream into this wrapper's error
// type via `into_other`.
impl<S> ResetStream for Stream<S>
where
    S: ResetStream + Positioned,
    S::Token: PartialEq,
    S::Range: PartialEq,
{
    type Checkpoint = S::Checkpoint;
    fn checkpoint(&self) -> Self::Checkpoint {
        self.0.checkpoint()
    }
    fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
        self.0
            .reset(checkpoint)
            .map_err(crate::error::ParseError::into_other)
    }
}
impl<S> StreamOnce for Stream<S>
where
    S: StreamOnce + Positioned,
    S::Token: PartialEq,
    S::Range: PartialEq,
{
    type Token = S::Token;
    type Range = S::Range;
    type Position = S::Position;
    // The whole point of the wrapper: use the richer error type.
    type Error = ParseError<S>;
    #[inline]
    fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>> {
        self.0.uncons().map_err(StreamError::into_other)
    }
    fn is_partial(&self) -> bool {
        self.0.is_partial()
    }
}
impl<S> RangeStreamOnce for Stream<S>
where
    S: RangeStream,
    S::Token: PartialEq,
    S::Range: PartialEq,
{
    #[inline]
    fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
        self.0.uncons_range(size).map_err(StreamError::into_other)
    }
    #[inline]
    fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        self.0.uncons_while(f).map_err(StreamError::into_other)
    }
    #[inline]
    fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        self.0.uncons_while1(f).map_err(StreamError::into_other)
    }
    #[inline]
    fn distance(&self, end: &Self::Checkpoint) -> usize {
        self.0.distance(end)
    }
    fn range(&self) -> Self::Range {
        self.0.range()
    }
}
impl<S> Positioned for Stream<S>
where
    S: StreamOnce + Positioned,
    S::Token: PartialEq,
    S::Range: PartialEq,
{
    fn position(&self) -> S::Position {
        self.0.position()
    }
}

1896
vendor/combine/src/stream/mod.rs vendored Normal file

File diff suppressed because it is too large Load Diff

465
vendor/combine/src/stream/position.rs vendored Normal file
View File

@@ -0,0 +1,465 @@
use crate::{
error::{ParseError, ParseResult, StreamError},
lib::fmt,
stream::{
IteratorStream, Positioned, RangeStreamOnce, ResetStream, SliceStream, StreamErrorFor,
StreamOnce,
},
};
#[cfg(feature = "std")]
use crate::stream::read;
/// Trait for tracking the current position of a `Stream`.
pub trait Positioner<Item> {
    /// The type which keeps track of the position
    type Position: Clone + Ord;
    /// Saved state which can restore the positioner via `reset`
    type Checkpoint: Clone;
    /// Returns the current position
    fn position(&self) -> Self::Position;
    /// Updates the position given that `token` has been taken from the stream
    fn update(&mut self, token: &Item);
    /// Captures the current state so it can be restored later with `reset`
    fn checkpoint(&self) -> Self::Checkpoint;
    /// Restores the state captured by `checkpoint`
    fn reset(&mut self, checkpoint: Self::Checkpoint);
}
/// Trait for tracking the current position of a `RangeStream`.
pub trait RangePositioner<Item, Range>: Positioner<Item> {
    /// Updates the position given that `range` has been taken from the stream
    fn update_range(&mut self, range: &Range);
}
/// Defines a default `Positioner` type for a particular `Stream` type.
pub trait DefaultPositioned {
    /// The positioner used when no explicit positioner is supplied
    type Positioner: Default;
}
// Text input defaults to line/column tracking.
impl<'a> DefaultPositioned for &'a str {
    type Positioner = SourcePosition;
}
// Slice-like and iterator inputs default to a plain element index.
impl<'a, T> DefaultPositioned for &'a [T] {
    type Positioner = IndexPositioner;
}
impl<'a, T> DefaultPositioned for SliceStream<'a, T> {
    type Positioner = IndexPositioner;
}
impl<T> DefaultPositioned for IteratorStream<T> {
    type Positioner = IndexPositioner;
}
#[cfg(feature = "std")]
impl<R> DefaultPositioned for read::Stream<R> {
    type Positioner = IndexPositioner;
}
/// The `Stream<Input>` struct maintains the current position in the stream `Input` using
/// the `Positioner` trait to track the position.
///
/// ```
/// # #![cfg(feature = "std")]
/// # extern crate combine;
/// # use combine::*;
/// # use combine::stream::easy;
/// # use combine::stream::position;
/// # fn main() {
///     let result = token(b'9')
///         .message("Not a nine")
///         .easy_parse(position::Stream::new(&b"8"[..]));
///     assert_eq!(result, Err(easy::Errors {
///         position: 0,
///         errors: vec![
///             easy::Error::Unexpected(b'8'.into()),
///             easy::Error::Expected(b'9'.into()),
///             easy::Error::Message("Not a nine".into())
///         ]
///     }));
/// # }
/// ```
#[derive(Clone, Debug, PartialEq)]
pub struct Stream<Input, X> {
    /// The input stream used when items are requested
    pub input: Input,
    /// The positioner used to update the current position
    pub positioner: X,
}
impl<Input, X> Stream<Input, X>
where
    Input: StreamOnce,
    X: Positioner<Input::Token>,
{
    /// Creates a new `Stream<Input, X>` from an input stream and a positioner.
    pub fn with_positioner(input: Input, positioner: X) -> Stream<Input, X> {
        Stream { input, positioner }
    }
}
impl<Input> Stream<Input, Input::Positioner>
where
    Input: StreamOnce + DefaultPositioned,
    Input::Positioner: Positioner<Input::Token>,
{
    /// Creates a new `Stream<Input, X>` from an input stream and its default positioner.
    pub fn new(input: Input) -> Stream<Input, Input::Positioner> {
        Stream::with_positioner(input, Input::Positioner::default())
    }
}
impl<Input, X, S> Positioned for Stream<Input, X>
where
    Input: StreamOnce,
    X: Positioner<Input::Token>,
    S: StreamError<Input::Token, Input::Range>,
    // The inner error type must be usable with both the positioner's position
    // and the inner stream's own position type.
    Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
{
    #[inline]
    fn position(&self) -> Self::Position {
        // The positioner, not the inner stream, is the source of truth.
        self.positioner.position()
    }
}
impl<Input, X, S> StreamOnce for Stream<Input, X>
where
    Input: StreamOnce,
    X: Positioner<Input::Token>,
    S: StreamError<Input::Token, Input::Range>,
    Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
{
    type Token = Input::Token;
    type Range = Input::Range;
    type Position = X::Position;
    type Error = Input::Error;
    #[inline]
    fn uncons(&mut self) -> Result<Input::Token, StreamErrorFor<Self>> {
        // Advance the positioner for every token successfully consumed.
        self.input.uncons().map(|c| {
            self.positioner.update(&c);
            c
        })
    }
    fn is_partial(&self) -> bool {
        self.input.is_partial()
    }
}
// Forwarding impls so a `&mut` reference to a positioner is itself a positioner.
impl<Item, T> Positioner<Item> for &'_ mut T
where
    Item: Clone,
    T: ?Sized + Positioner<Item>,
{
    type Position = T::Position;
    type Checkpoint = T::Checkpoint;
    #[inline]
    fn position(&self) -> T::Position {
        (**self).position()
    }
    #[inline]
    fn update(&mut self, item: &Item) {
        (**self).update(item)
    }
    #[inline]
    fn checkpoint(&self) -> Self::Checkpoint {
        (**self).checkpoint()
    }
    #[inline]
    fn reset(&mut self, checkpoint: Self::Checkpoint) {
        (**self).reset(checkpoint)
    }
}
impl<Item, Range, T> RangePositioner<Item, Range> for &'_ mut T
where
    Item: Clone,
    Range: Clone + crate::stream::Range,
    T: ?Sized + RangePositioner<Item, Range>,
{
    fn update_range(&mut self, range: &Range) {
        (**self).update_range(range);
    }
}
/// The `IndexPositioner<Item, Range>` struct maintains the current index into the stream `Input`. The
/// initial index is index 0. Each `Item` committed increments the index by 1; each `range` committed
/// increments the position by `range.len()`.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct IndexPositioner(usize);
impl<Item> Positioner<Item> for IndexPositioner
where
    Item: Clone,
{
    type Position = usize;
    // The positioner is trivially copyable, so it serves as its own checkpoint.
    type Checkpoint = Self;
    #[inline]
    fn position(&self) -> usize {
        self.0
    }
    #[inline]
    fn update(&mut self, _item: &Item) {
        self.0 += 1
    }
    #[inline]
    fn checkpoint(&self) -> Self::Checkpoint {
        self.clone()
    }
    #[inline]
    fn reset(&mut self, checkpoint: Self::Checkpoint) {
        *self = checkpoint;
    }
}
impl IndexPositioner {
    /// Creates a positioner starting at index 0.
    pub fn new() -> IndexPositioner {
        IndexPositioner::new_with_position(0)
    }
    /// Creates a positioner starting at an arbitrary index.
    pub fn new_with_position(position: usize) -> IndexPositioner {
        IndexPositioner(position)
    }
}
impl<Item, Range> RangePositioner<Item, Range> for IndexPositioner
where
    Item: Clone,
    Range: Clone + crate::stream::Range,
{
    fn update_range(&mut self, range: &Range) {
        // A committed range advances the index by its whole length at once.
        self.0 += range.len()
    }
}
/// Struct which represents a position in a source file.
/// Both fields are 1-based (see the `Default` impl).
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct SourcePosition {
    /// Current line of the input
    pub line: i32,
    /// Current column of the input
    pub column: i32,
}
impl Default for SourcePosition {
fn default() -> Self {
SourcePosition { line: 1, column: 1 }
}
}
impl fmt::Display for SourcePosition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "line: {}, column: {}", self.line, self.column)
}
}
impl SourcePosition {
    /// Creates a position at the start of the input (line 1, column 1).
    pub fn new() -> Self {
        SourcePosition::default()
    }
}
impl Positioner<char> for SourcePosition {
    type Position = SourcePosition;
    // `SourcePosition` is `Copy`, so it serves as its own checkpoint.
    type Checkpoint = Self;
    #[inline]
    fn position(&self) -> SourcePosition {
        *self
    }
    #[inline]
    fn update(&mut self, token: &char) {
        // Advance the column for every character; a newline resets the column
        // and moves to the next line (the `+= 1` above is then discarded).
        self.column += 1;
        if *token == '\n' {
            self.column = 1;
            self.line += 1;
        }
    }
    #[inline]
    fn checkpoint(&self) -> Self::Checkpoint {
        *self
    }
    #[inline]
    fn reset(&mut self, checkpoint: Self::Checkpoint) {
        *self = checkpoint;
    }
}
impl Positioner<u8> for SourcePosition {
    type Position = SourcePosition;
    type Checkpoint = Self;
    #[inline]
    fn position(&self) -> SourcePosition {
        *self
    }
    #[inline]
    fn update(&mut self, token: &u8) {
        // NOTE(review): columns are counted per byte here, so for multi-byte
        // UTF-8 input each byte presumably advances the column — columns then
        // differ from character counts.
        self.column += 1;
        if *token == b'\n' {
            self.column = 1;
            self.line += 1;
        }
    }
    #[inline]
    fn checkpoint(&self) -> Self::Checkpoint {
        *self
    }
    #[inline]
    fn reset(&mut self, checkpoint: Self::Checkpoint) {
        *self = checkpoint;
    }
}
impl<'a> RangePositioner<char, &'a str> for SourcePosition {
    fn update_range(&mut self, range: &&'a str) {
        // A committed string range advances line/column one character at a time
        // so that embedded newlines are accounted for.
        for c in range.chars() {
            self.update(&c);
        }
    }
}
impl<Input, X, S> RangeStreamOnce for Stream<Input, X>
where
    Input: RangeStreamOnce,
    X: RangePositioner<Input::Token, Input::Range>,
    S: StreamError<Input::Token, Input::Range>,
    Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
    Input::Position: Clone + Ord,
{
    #[inline]
    fn uncons_range(&mut self, size: usize) -> Result<Input::Range, StreamErrorFor<Self>> {
        // Update the position by the whole range only when the read succeeds.
        self.input.uncons_range(size).map(|range| {
            self.positioner.update_range(&range);
            range
        })
    }
    #[inline]
    fn uncons_while<F>(&mut self, mut predicate: F) -> Result<Input::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Input::Token) -> bool,
    {
        // Wrap the predicate so the positioner is advanced for exactly the
        // tokens that are accepted (the token is cloned for the predicate,
        // then borrowed for the position update).
        let positioner = &mut self.positioner;
        self.input.uncons_while(|t| {
            if predicate(t.clone()) {
                positioner.update(&t);
                true
            } else {
                false
            }
        })
    }
    #[inline]
    fn uncons_while1<F>(
        &mut self,
        mut predicate: F,
    ) -> ParseResult<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        // Same wrapping trick as `uncons_while`, for the at-least-one variant.
        let positioner = &mut self.positioner;
        self.input.uncons_while1(|t| {
            if predicate(t.clone()) {
                positioner.update(&t);
                true
            } else {
                false
            }
        })
    }
    #[inline]
    fn distance(&self, end: &Self::Checkpoint) -> usize {
        self.input.distance(&end.input)
    }
    fn range(&self) -> Self::Range {
        self.input.range()
    }
}
impl<Input, X, S> ResetStream for Stream<Input, X>
where
    Input: ResetStream,
    X: Positioner<Input::Token>,
    S: StreamError<Input::Token, Input::Range>,
    Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
{
    // A checkpoint must capture both the inner stream and the positioner so
    // that `reset` restores the position along with the input.
    type Checkpoint = Stream<Input::Checkpoint, X::Checkpoint>;
    fn checkpoint(&self) -> Self::Checkpoint {
        Stream {
            input: self.input.checkpoint(),
            positioner: self.positioner.checkpoint(),
        }
    }
    fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
        // Only reset the positioner if the inner stream reset succeeded.
        self.input.reset(checkpoint.input)?;
        self.positioner.reset(checkpoint.positioner);
        Ok(())
    }
}
#[cfg(all(feature = "std", test))]
mod tests {
    use crate::Parser;
    use super::*;
    // Consuming one token advances the default `IndexPositioner` by 1.
    #[test]
    fn test_positioner() {
        let input = ["a".to_string(), "b".to_string()];
        let mut parser = crate::any();
        let result = parser.parse(Stream::new(&input[..]));
        assert_eq!(
            result,
            Ok((
                "a".to_string(),
                Stream::with_positioner(
                    &["b".to_string()][..],
                    IndexPositioner::new_with_position(1)
                )
            ))
        );
    }
    // Consuming a range of length 2 advances the index by the range length.
    #[test]
    fn test_range_positioner() {
        let input = ["a".to_string(), "b".to_string(), "c".to_string()];
        let mut parser = crate::parser::range::take(2);
        let result = parser.parse(Stream::new(&input[..]));
        assert_eq!(
            result,
            Ok((
                &["a".to_string(), "b".to_string()][..],
                Stream::with_positioner(
                    &["c".to_string()][..],
                    IndexPositioner::new_with_position(2)
                )
            ))
        );
    }
}

210
vendor/combine/src/stream/read.rs vendored Normal file
View File

@@ -0,0 +1,210 @@
use std::{
fmt,
io::{self, Bytes, Read},
};
use crate::{
error::{ParseError, StreamError, Tracked},
stream::{StreamErrorFor, StreamOnce},
};
/// Error type for `read::Stream`: either a parse failure, end of input, or an
/// underlying I/O error.
#[derive(Debug)]
pub enum Error {
    Unexpected,
    EndOfInput,
    Io(io::Error),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::Unexpected => write!(f, "unexpected parse"),
Error::EndOfInput => write!(f, "unexpected end of input"),
Error::Io(err) => write!(f, "{}", err),
}
}
}
impl PartialEq for Error {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Error::Unexpected, Error::Unexpected) => true,
(Error::EndOfInput, Error::EndOfInput) => true,
_ => false,
}
}
}
// Every descriptive constructor collapses to `Error::Unexpected`: this error
// type deliberately carries no token/range payload, only the failure kind.
impl<Item, Range> StreamError<Item, Range> for Error {
    #[inline]
    fn unexpected_token(_: Item) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn unexpected_range(_: Range) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn unexpected_format<T>(_: T) -> Self
    where
        T: fmt::Display,
    {
        Error::Unexpected
    }
    #[inline]
    fn expected_token(_: Item) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn expected_range(_: Range) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn expected_format<T>(_: T) -> Self
    where
        T: fmt::Display,
    {
        Error::Unexpected
    }
    #[inline]
    fn message_format<T>(_: T) -> Self
    where
        T: fmt::Display,
    {
        Error::Unexpected
    }
    #[inline]
    fn message_token(_: Item) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn message_range(_: Range) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn end_of_input() -> Self {
        Error::EndOfInput
    }
    #[inline]
    fn is_unexpected_end_of_input(&self) -> bool {
        *self == Error::EndOfInput
    }
    #[inline]
    fn into_other<T>(self) -> T
    where
        T: StreamError<Item, Range>,
    {
        // Translate into a richer error type, preserving the I/O cause.
        match self {
            Error::Unexpected => T::unexpected_static_message("parse"),
            Error::EndOfInput => T::end_of_input(),
            Error::Io(err) => T::other(err),
        }
    }
}
impl<Item, Range, Position> ParseError<Item, Range, Position> for Error
where
    Position: Default,
{
    // This error type is its own stream error; positions are not stored.
    type StreamError = Self;
    #[inline]
    fn empty(_position: Position) -> Self {
        Error::Unexpected
    }
    #[inline]
    fn from_error(_: Position, err: Self::StreamError) -> Self {
        err
    }
    #[inline]
    fn set_position(&mut self, _position: Position) {}
    #[inline]
    fn add(&mut self, err: Self::StreamError) {
        // `EndOfInput` is sticky: once seen it is kept over any later error.
        *self = match (&*self, err) {
            (Error::EndOfInput, _) => Error::EndOfInput,
            (_, err) => err,
        };
    }
    #[inline]
    fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
    where
        F: FnOnce(&mut Tracked<Self>),
    {
        f(self_);
        self_.error = info;
    }
    fn is_unexpected_end_of_input(&self) -> bool {
        *self == Error::EndOfInput
    }
    #[inline]
    fn into_other<T>(self) -> T
    where
        T: ParseError<Item, Range, Position>,
    {
        // No position is tracked, so the target error gets the default one.
        T::from_error(Position::default(), StreamError::into_other(self))
    }
}
/// Stream adaptor over a `std::io::Read`, yielding one byte at a time.
pub struct Stream<R> {
    bytes: Bytes<R>,
}
impl<R: Read> StreamOnce for Stream<R> {
    type Token = u8;
    type Range = &'static [u8];
    type Position = usize;
    type Error = Error;
    #[inline]
    fn uncons(&mut self) -> Result<u8, StreamErrorFor<Self>> {
        // Map the three outcomes of the byte iterator onto this stream's errors.
        match self.bytes.next() {
            Some(Ok(b)) => Ok(b),
            Some(Err(err)) => Err(Error::Io(err)),
            None => Err(Error::EndOfInput),
        }
    }
}
impl<R> Stream<R>
where
    R: Read,
{
    /// Creates a `StreamOnce` instance from a value implementing `std::io::Read`.
    ///
    /// NOTE: This type does not implement `Positioned` and `Clone` and must be wrapped with types
    /// such as `BufferedStreamRef` and `State` to become a `Stream` which can be parsed
    ///
    /// ```rust
    /// # #![cfg(feature = "std")]
    /// # extern crate combine;
    /// use combine::*;
    /// use combine::parser::byte::*;
    /// use combine::stream::read;
    /// use combine::stream::buffered;
    /// use combine::stream::position;
    /// use std::io::Read;
    ///
    /// # fn main() {
    /// let input: &[u8] = b"123,";
    /// let stream = buffered::Stream::new(position::Stream::new(read::Stream::new(input)), 1);
    /// let result = (many(digit()), byte(b','))
    ///     .parse(stream)
    ///     .map(|t| t.0);
    /// assert_eq!(result, Ok((vec![b'1', b'2', b'3'], b',')));
    /// # }
    /// ```
    pub fn new(read: R) -> Stream<R> {
        Stream {
            bytes: read.bytes(),
        }
    }
}

157
vendor/combine/src/stream/span.rs vendored Normal file
View File

@@ -0,0 +1,157 @@
use crate::lib::marker::PhantomData;
use crate::{
error::{ParseErrorInto, ParseResult, StreamErrorInto},
stream::{ResetStream, StreamErrorFor},
Positioned, RangeStream, RangeStreamOnce, StreamOnce,
};
/// A half-open region of the input delimited by a `start` and an `end` position.
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Span<P> {
    pub start: P,
    pub end: P,
}
impl<P> From<P> for Span<P>
where
P: Clone,
{
#[inline]
fn from(p: P) -> Self {
Self {
start: p.clone(),
end: p,
}
}
}
impl<P> Span<P> {
    /// Applies `f` to both endpoints, producing a span over the mapped type.
    pub fn map<Q>(self, mut f: impl FnMut(P) -> Q) -> Span<Q> {
        // `start` is mapped before `end`, matching the field order.
        let Span { start, end } = self;
        let start = f(start);
        let end = f(end);
        Span { start, end }
    }
}
/// Stream wrapper which reports positions as `Span`s using the error type `E`.
/// The `PhantomData<fn(E) -> E>` keeps `E` invariant without owning one.
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub struct Stream<S, E>(pub S, PhantomData<fn(E) -> E>);
impl<S, E> From<S> for Stream<S, E> {
    fn from(stream: S) -> Self {
        Stream(stream, PhantomData)
    }
}
// Delegates to the inner stream, converting its errors into the span-aware
// error type `E` via `ParseErrorInto`/`StreamErrorInto`.
impl<S, E> ResetStream for Stream<S, E>
where
    S: ResetStream + Positioned,
    S::Token: PartialEq,
    S::Range: PartialEq,
    E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
    S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
    <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
        StreamErrorInto<S::Token, S::Range>,
{
    type Checkpoint = S::Checkpoint;
    #[inline]
    fn checkpoint(&self) -> Self::Checkpoint {
        self.0.checkpoint()
    }
    #[inline]
    fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
        self.0
            .reset(checkpoint)
            .map_err(ParseErrorInto::into_other_error)
    }
}
impl<S, E> StreamOnce for Stream<S, E>
where
    S: StreamOnce + Positioned,
    S::Token: PartialEq,
    S::Range: PartialEq,
    E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
    S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
    <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
        StreamErrorInto<S::Token, S::Range>,
{
    type Token = S::Token;
    type Range = S::Range;
    // Positions of this wrapper are spans over the inner stream's positions.
    type Position = Span<S::Position>;
    type Error = E;
    #[inline]
    fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>> {
        self.0.uncons().map_err(StreamErrorInto::into_other_error)
    }
    #[inline]
    fn is_partial(&self) -> bool {
        self.0.is_partial()
    }
}
impl<S, E> RangeStreamOnce for Stream<S, E>
where
    S: RangeStream,
    S::Token: PartialEq,
    S::Range: PartialEq,
    E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
    S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
    <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
        StreamErrorInto<S::Token, S::Range>,
{
    // All range operations forward to the inner stream; only errors need
    // converting, since ranges and checkpoints are shared with it.
    #[inline]
    fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
        self.0
            .uncons_range(size)
            .map_err(StreamErrorInto::into_other_error)
    }
    #[inline]
    fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        self.0
            .uncons_while(f)
            .map_err(StreamErrorInto::into_other_error)
    }
    #[inline]
    fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        self.0
            .uncons_while1(f)
            .map_err(StreamErrorInto::into_other_error)
    }
    #[inline]
    fn distance(&self, end: &Self::Checkpoint) -> usize {
        self.0.distance(end)
    }
    fn range(&self) -> Self::Range {
        self.0.range()
    }
}
impl<S, E> Positioned for Stream<S, E>
where
    S: StreamOnce + Positioned,
    S::Token: PartialEq,
    S::Range: PartialEq,
    E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
    S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
    <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
        StreamErrorInto<S::Token, S::Range>,
{
    fn position(&self) -> Span<S::Position> {
        // The inner position becomes an empty span (start == end).
        Span::from(self.0.position())
    }
}

91
vendor/combine/src/stream/state.rs vendored Normal file
View File

@@ -0,0 +1,91 @@
use crate::{
error::ParseResult,
stream::{Positioned, RangeStreamOnce, ResetStream, StreamErrorFor, StreamOnce},
};
/// Stream wrapper which carries arbitrary user `state` alongside the
/// underlying `stream` without affecting parsing behavior.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
pub struct Stream<S, U> {
    pub stream: S,
    pub state: U,
}
// Pure pass-through impls: the user state `U` plays no part in streaming,
// so every method simply forwards to `self.stream`.
impl<S, U> Positioned for Stream<S, U>
where
    S: Positioned,
{
    #[inline]
    fn position(&self) -> Self::Position {
        self.stream.position()
    }
}
impl<S, U> ResetStream for Stream<S, U>
where
    S: ResetStream,
{
    type Checkpoint = S::Checkpoint;
    #[inline]
    fn checkpoint(&self) -> Self::Checkpoint {
        self.stream.checkpoint()
    }
    #[inline]
    fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
        self.stream.reset(checkpoint)
    }
}
impl<S, U> StreamOnce for Stream<S, U>
where
    S: StreamOnce,
{
    type Token = S::Token;
    type Range = S::Range;
    type Position = S::Position;
    type Error = S::Error;
    #[inline]
    fn uncons(&mut self) -> Result<S::Token, StreamErrorFor<Self>> {
        self.stream.uncons()
    }
    fn is_partial(&self) -> bool {
        self.stream.is_partial()
    }
}
impl<S, U> RangeStreamOnce for Stream<S, U>
where
    S: RangeStreamOnce,
{
    #[inline]
    fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
        self.stream.uncons_range(size)
    }
    #[inline]
    fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        self.stream.uncons_while(f)
    }
    fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
    where
        F: FnMut(Self::Token) -> bool,
    {
        self.stream.uncons_while1(f)
    }
    #[inline]
    fn distance(&self, end: &Self::Checkpoint) -> usize {
        self.stream.distance(end)
    }
    #[inline]
    fn range(&self) -> Self::Range {
        self.stream.range()
    }
}

852
vendor/combine/tests/async.rs vendored Normal file
View File

@@ -0,0 +1,852 @@
#![allow(renamed_and_removed_lints)]
use std::{
cell::Cell,
io::{self, Cursor},
rc::Rc,
str,
};
use {
bytes::{Buf, BytesMut},
combine::{
any, count_min_max,
error::{ParseError, StreamError},
many1, parser,
parser::{
byte::{num, take_until_bytes},
char::{char, digit, letter, string},
choice::optional,
combinator::{
any_partial_state, any_send_partial_state, attempt, from_str, no_partial,
recognize, AnyPartialState, AnySendPartialState,
},
range::{
self, range, recognize_with_value, take, take_fn, take_until_range, take_while,
take_while1,
},
repeat,
},
satisfy, sep_end_by, skip_many, skip_many1,
stream::{easy, RangeStream, StreamErrorFor},
token, Parser,
},
futures::prelude::*,
futures_03_dep as futures,
partial_io::PartialRead,
quick_error::quick_error,
quickcheck::quickcheck,
tokio_dep as tokio,
tokio_util::codec::{Decoder, FramedRead},
};
// Workaround partial_io not working with tokio-0.2
mod support;
use support::*;
// Unified test error type: wraps I/O, parse, UTF-8 and free-form string
// errors, with `From` conversions generated by `quick_error!`.
quick_error! {
    #[derive(Debug)]
    enum Error {
        Io(err: io::Error) {
            display("{}", err)
            from()
        }
        Parse(err: easy::Errors<char, String, usize>) {
            display("{}", err)
            from()
        }
        Utf8(err: std::str::Utf8Error) {
            display("{}", err)
            from()
        }
        Message(err: String) {
            display("{}", err)
            from()
        }
    }
}
// Helper for the decoder macros below: with no custom state the parser
// expression is used as-is; with a custom state type the parser is called
// with a clone of the decoder's state field (`$self_.1`).
macro_rules! mk_parser {
    ($parser:expr, $self_:expr,()) => {
        $parser
    };
    ($parser:expr, $self_:expr,($custom_state:ty)) => {
        $parser($self_.1.clone())
    };
}
// Generates a tokio-util `Decoder` around a combine parser over `&str` input.
// The decoder keeps the parser's partial state (`AnyPartialState`) in field 0
// and optional custom state in field 1, and feeds UTF-8 text through
// `decode_tokio`, advancing the buffer by however much input was committed.
macro_rules! impl_decoder {
    ($typ: ident, $token: ty, $parser: expr, $custom_state: ty) => {
        #[derive(Default)]
        struct $typ(AnyPartialState, $custom_state);
        impl_decoder!{$typ, $token, $parser; ($custom_state)}
    };
    ($typ: ident, $token: ty, $parser: expr) => {
        #[derive(Default)]
        struct $typ(AnyPartialState);
        impl_decoder!{$typ, $token, $parser; ()}
    };
    ($typ: ident, $token: ty, $parser: expr; ( $($custom_state: tt)* )) => {
        impl Decoder for $typ {
            type Item = $token;
            type Error = Error;
            fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                (&mut &mut *self).decode(src)
            }
            fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                (&mut &mut *self).decode_eof(src)
            }
        }
        impl<'a> Decoder for &'a mut $typ {
            type Item = $token;
            type Error = Error;
            fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                self.decode_stream(src, false)
            }
            fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                self.decode_stream(src, true)
            }
        }
        impl<'a> $typ {
            fn decode_stream(&mut self, src: &mut BytesMut, eof: bool) -> Result<Option<$token>, Error> {
                let (opt, removed_len) = {
                    let str_src = str::from_utf8(&src[..])?;
                    println!("Decoding `{}`", str_src);
                    combine::stream::decode_tokio(
                        any_partial_state(mk_parser!($parser, self, ($($custom_state)*))),
                        &mut easy::Stream(combine::stream::MaybePartialStream(str_src, !eof)),
                        &mut self.0,
                    ).map_err(|err| {
                        // Since err contains references into `src` we must remove these before
                        // returning the error and before we call `advance` to remove the input we
                        // just committed
                        let err = err.map_range(|r| r.to_string())
                            .map_position(|p| p.translate_position(&str_src[..]));
                        format!("{}\nIn input: `{}`", err, str_src)
                    })?
                };
                src.advance(removed_len);
                match opt {
                    None => println!("Need more input!"),
                    Some(_) => (),
                }
                Ok(opt)
            }
        }
    }
}
// Byte-level twin of `impl_decoder!`: parses raw `&[u8]` via the synchronous
// `decode` entry point, with no `decode_eof` override.
macro_rules! impl_byte_decoder {
    ($typ: ident, $token: ty, $parser: expr, $custom_state: ty) => {
        #[derive(Default)]
        struct $typ(AnyPartialState, $custom_state);
        impl_byte_decoder!{$typ, $token, $parser; ($custom_state)}
    };
    ($typ: ident, $token: ty, $parser: expr) => {
        #[derive(Default)]
        struct $typ(AnyPartialState);
        impl_byte_decoder!{$typ, $token, $parser; ()}
    };
    ($typ: ident, $token: ty, $parser: expr; ( $($custom_state: tt)* )) => {
        impl Decoder for $typ {
            type Item = $token;
            type Error = Error;
            fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                (&mut &mut *self).decode(src)
            }
        }
        impl<'a> Decoder for &'a mut $typ {
            type Item = $token;
            type Error = Error;
            fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                let (opt, removed_len) = {
                    let str_src = &src[..];
                    println!("Decoding `{:?}`", str_src);
                    combine::stream::decode(
                        any_partial_state(mk_parser!($parser, self, ($($custom_state)*))),
                        &mut easy::Stream(combine::stream::PartialStream(str_src)),
                        &mut self.0,
                    ).map_err(|err| {
                        // Since err contains references into `src` we must remove these before
                        // returning the error and before we call `advance` to remove the input we
                        // just committed
                        let err = err.map_range(|r| format!("{:?}", r))
                            .map_position(|p| p.translate_position(&str_src[..]));
                        format!("{}\nIn input: `{:?}`", err, str_src)
                    })?
                };
                src.advance(removed_len);
                match opt {
                    None => println!("Need more input!"),
                    Some(_) => (),
                }
                Ok(opt)
            }
        }
    }
}
use partial_io::{GenNoErrors, GenWouldBlock, PartialOp, PartialWithErrors};
/// Runs `decoder` to completion over `input`, delivering the bytes in the
/// partial chunks described by `seq` (via `PartialAsyncRead`) to exercise the
/// parser's partial-state handling. Returns all decoded items.
fn run_decoder<B, D, S>(input: &B, seq: S, decoder: D) -> Result<Vec<D::Item>, D::Error>
where
    D: Decoder<Error = Error>,
    D::Item: ::std::fmt::Debug,
    S: IntoIterator<Item = PartialOp> + 'static,
    S::IntoIter: Send,
    B: ?Sized + AsRef<[u8]>,
{
    let ref mut reader = Cursor::new(input.as_ref());
    let partial_reader = PartialAsyncRead::new(reader, seq);
    // Single-threaded runtime is enough: the whole pipeline is driven inline.
    tokio_02_dep::runtime::Builder::new()
        .basic_scheduler()
        .build()
        .unwrap()
        .block_on(
            FramedRead::new(partial_reader, decoder)
                .map_ok(|x| {
                    println!("Decoded `{:?}`", x);
                    x
                })
                .try_collect(),
        )
}
// Simplest test parser: one or more digits terminated by CRLF.
parser! {
    type PartialState = AnyPartialState;
    fn basic_parser['a, Input]()(Input) -> String
    where [ Input: RangeStream<Token = char, Range = &'a str> ]
    {
        any_partial_state(
            many1(digit()).skip(range(&"\r\n"[..])),
        )
    }
}
impl_decoder! { Basic, String, basic_parser() }
// Sanity check with no partial-read interruptions (`seq` is empty).
#[test]
fn many1_skip_no_errors() {
    let input = "123\r\n\
                 456\r\n";
    let result = run_decoder(input, vec![], Basic::default());
    assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
    assert_eq!(result.unwrap(), vec!["123".to_string(), "456".to_string()]);
}
// Parses `#`, optional spaces, then a count N, then exactly N further tokens —
// exercises `then_partial`, where a later parser depends on an earlier result.
parser! {
    type PartialState = AnyPartialState;
    fn prefix_many_then_parser['a, Input]()(Input) -> String
    where [ Input: RangeStream<Token = char, Range = &'a str> ]
    {
        let integer = from_str(many1::<String, _, _>(digit()));
        any_partial_state((char('#'), skip_many(char(' ')), integer)
            .then_partial(|t| {
                let c = t.2;
                count_min_max(c, c, any())
            })
        )
    }
}
// Accepts either a digit run or a letter run, terminated by CRLF.
parser! {
    type PartialState = AnyPartialState;
    fn choice_parser['a, Input]()(Input) -> String
    where [ Input: RangeStream<Token = char, Range = &'a str> ]
    {
        any_partial_state(
            many1(digit())
                .or(many1(letter()))
                .skip(range(&"\r\n"[..]))
        )
    }
}
/// Parser for an HTTP-style framed message: reads a `Content-Length: N` header
/// followed by a blank line, then takes exactly `N` characters as the body.
fn content_length<'a, Input>(
) -> impl Parser<Input, Output = String, PartialState = AnySendPartialState> + 'a
where
    Input: RangeStream<Token = char, Range = &'a str> + 'a,
{
    let content_length = range("Content-Length: ").with(
        range::recognize(skip_many1(digit())).and_then(|digits: &str| {
            // Convert the error from `.parse` into an error combine understands
            digits
                .parse::<usize>()
                .map_err(StreamErrorFor::<Input>::other)
        }),
    );
    any_send_partial_state(
        (
            skip_many(range("\r\n")),
            content_length,
            range("\r\n\r\n").map(|_| ()),
        )
            // The body length is only known after the header has been parsed.
            .then_partial(|&mut (_, message_length, _)| {
                take(message_length).map(|bytes: &str| bytes.to_owned())
            }),
    )
}
// Property tests: `run_decoder` drives each decoder with a quickcheck-generated
// sequence of partial reads / WouldBlock errors (`seq`), checking that every
// parser resumes correctly from an arbitrary interruption point and still
// produces the same values as an uninterrupted parse.
quickcheck! {
    // Baseline decoder (`Basic`): one "\r\n"-terminated field per item.
    fn many1_skip_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        let input = "123\r\n\
                     456\r\n\
                     1\r\n\
                     5\r\n\
                     666666\r\n";
        let result = run_decoder(input, seq, Basic::default());
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            vec!["123".to_string(), "456".to_string(), "1".to_string(), "5".to_string(), "666666".to_string()]
        );
    }

    // `then_partial` decoder: a "#<count>" prefix decides how many characters
    // of payload follow (parser defined above this block).
    fn prefix_many_then_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String, prefix_many_then_parser() }

        let input = "# 1a\
                     # 4abcd\
                     #0\
                     #3:?a\
                     #10abcdefghij";
        let result = run_decoder(input, seq, TestParser::default());
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["a", "abcd", "", ":?a", "abcdefghij"]
        );
    }

    // Alternation (`digits.or(letters)`) must resume into the correct branch.
    fn choice_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String, choice_parser() }

        let input = "1\r\n\
                     abcd\r\n\
                     123\r\n\
                     abc\r\n\
                     1232751\r\n";
        let result = run_decoder(input, seq, TestParser::default());
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["1", "abcd", "123", "abc", "1232751"]
        );
    }

    // `recognize` must return the exact consumed range even when the inner
    // parser was suspended part-way through.
    fn recognize_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            recognize(
                (skip_many1(digit()), optional((char('.'), skip_many(digit()))))
            )
            .skip(range(&"\r\n"[..]))
        }

        let input = "1.0\r\n\
                     123.123\r\n\
                     17824\r\n\
                     3.14\r\n\
                     1.\r\n\
                     2\r\n";
        let result = run_decoder(input, seq, TestParser::default());
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["1.0", "123.123", "17824", "3.14", "1.", "2"]
        );
    }

    // Same as above via `recognize_with_value`, which also yields the inner
    // parser's value (discarded here).
    fn recognize_range_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            recognize_with_value(
                (skip_many1(digit()), optional((char('.'), skip_many(digit()))))
            )
            .map(|(r, _)| String::from(r))
            .skip(range(&"\r\n"[..]))
        }

        let input = "1.0\r\n\
                     123.123\r\n\
                     17824\r\n\
                     3.14\r\n\
                     1.\r\n\
                     2\r\n";
        let result = run_decoder(input, seq, TestParser::default());
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["1.0", "123.123", "17824", "3.14", "1.", "2"]
        );
    }

    // The shared counter checks the predicate is evaluated exactly once per
    // character (20 field chars + 6 terminating '\r' checks = 26), i.e. resumes
    // do not re-run `take_while` over already-accepted input.
    fn take_while_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            |counter: Rc<Cell<i32>>|
                take_while(move |c| { counter.set(counter.get() + 1); c != '\r' })
                    .map(String::from)
                    .skip(range("\r\n")),
            Rc<Cell<i32>>
        }

        let input = "1.0\r\n\
                     123.123\r\n\
                     17824\r\n\
                     3.14\r\n\
                     \r\n\
                     2\r\n";
        let counter = Rc::new(Cell::new(0));
        let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["1.0", "123.123", "17824", "3.14", "", "2"]
        );
        assert_eq!(counter.get(), 26);
    }

    // As above for `take_while1` (22 field chars + 6 '\r' checks = 28).
    fn take_while1_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            |count: Rc<Cell<i32>>|
                take_while1(move |c| { count.set(count.get() + 1); c != '\r' })
                    .map(String::from)
                    .skip(range("\r\n")),
            Rc<Cell<i32>>
        }

        let input = "1.0\r\n\
                     123.123\r\n\
                     17824\r\n\
                     3.14\r\n\
                     1.\r\n\
                     2\r\n";
        let counter = Rc::new(Cell::new(0));
        let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["1.0", "123.123", "17824", "3.14", "1.", "2"]
        );
        assert_eq!(counter.get(), 28);
    }

    // The end parser's side effect must fire exactly once per match even when
    // `take_until` is interrupted and resumed.
    fn take_until(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            |count: Rc<Cell<i32>>|
                repeat::take_until(token(',').map(move |_| count.set(count.get() + 1))).skip(token(',')),
            Rc<Cell<i32>>
        }

        let input = "123,456,789,";
        let counter = Rc::new(Cell::new(0));
        let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["123", "456", "789"]
        );
        assert_eq!(counter.get(), 3);
    }

    // Same, but the end parser is a committing two-token sequence wrapped in
    // `attempt` — partial matches of "::" must not double-count.
    fn take_until_committed(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            |count: Rc<Cell<i32>>| {
                let end = attempt((token(':').map(move |_| count.set(count.get() + 1)), token(':')));
                repeat::take_until(end).skip((token(':'), token(':')))
            },
            Rc<Cell<i32>>
        }

        let input = "123::456::789::";
        let counter = Rc::new(Cell::new(0));
        let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(
            result.unwrap(),
            ["123", "456", "789"]
        );
        assert_eq!(counter.get(), 3);
    }

    // Range-based variant: `take_until_range` stops before "::".
    fn take_until_range_committed(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, String,
            take_until_range("::").map(String::from).skip((token(':'), token(':')))
        }

        let input = "123::456::789::";
        let result = run_decoder(input, seq, TestParser(Default::default()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(result.unwrap(), ["123", "456", "789"]);
    }

    // `any_send_partial_state` must carry the inner parser's state across
    // suspensions instead of restarting each frame (uses `content_length`).
    fn any_send_partial_state_do_not_forget_state(sizes: Vec<usize>, seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, usize,
            any_send_partial_state(content_length().map(|bytes| bytes.len()))
        }

        let input : String = sizes
            .iter()
            .map(|s| {
                format!(
                    "Content-Length: {}\r\n\r\n{}\r\n",
                    s,
                    ::std::iter::repeat('a').take(*s).collect::<String>()
                )
            })
            .collect();

        let result = run_decoder(input.as_bytes(), seq, TestParser(Default::default()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(result.unwrap(), sizes);
    }

    // `take_fn` consumes up to the position returned by the closure.
    fn take_fn_test(sizes: Vec<usize>, seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, usize,
            take_fn(|s: &str| s.find("\r\n")).map(|bytes: &str| bytes.parse::<usize>().unwrap()).skip(take(2))
        }

        let input : String = sizes
            .iter()
            .map(|s| {
                format!(
                    "{}\r\n",
                    s,
                )
            })
            .collect();

        let result = run_decoder(input.as_bytes(), seq, TestParser(Default::default()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(result.unwrap(), sizes);
    }

    // Byte-slice terminator variant of the previous test.
    fn take_until_bytes_test(sizes: Vec<usize>, seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, usize,
            take_until_bytes("\r\n".as_bytes())
                .map(|bytes: &str| bytes.parse::<usize>().unwrap())
                .skip(take(2))
        }

        let input : String = sizes
            .iter()
            .map(|s| {
                format!(
                    "{}\r\n",
                    s,
                )
            })
            .collect();

        let result = run_decoder(input.as_bytes(), seq, TestParser(Default::default()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(result.unwrap(), sizes);
    }

    // Binary decoder: each item is a big-endian u16 followed by "\r\n".
    fn num_test(ints: Vec<u16>, seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_byte_decoder!{ TestParser, u16,
            num::be_u16()
                .skip(take(2))
        }

        let input: Vec<u8> = ints.iter()
            .flat_map(|i| {
                let mut v = Vec::new();
                v.extend_from_slice(&i.to_be_bytes());
                v.extend_from_slice(b"\r\n");
                v
            })
            .collect();

        let result = run_decoder(&input, seq, TestParser(Default::default()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(result.unwrap(), ints);
    }

    // `sep_end_by` with a `no_partial` separator: the whole list is one item.
    fn sep_end_by_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
        impl_decoder!{ TestParser, Vec<String>,
            repeat::sep_end_by((digit(), digit(), digit()).map(|(a, b, c)| vec![a, b, c].into_iter().collect()), no_partial(string("::")))
                .skip(no_partial(string("\r\n")))
        }

        let input = "123::456::789::\r\n";

        let result = run_decoder(&input, seq, TestParser(Default::default()));
        assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
        assert_eq!(result.unwrap(), vec![vec!["123".to_string(), "456".to_string(), "789".to_string()]]);
    }
}
/// Regression check: `skip_count_min_max` must resume correctly when the
/// decoder is fed a single byte at a time (`PartialOp::Limited(1)`).
#[test]
fn skip_count_min_max_test() {
    impl_decoder! { TestParser, String,
        repeat::skip_count_min_max(1, 2, char('_')).skip(char('.')).map(|_| "".to_string())
    }

    let ops = vec![PartialOp::Limited(1)];
    let decoded = run_decoder("_.", ops, TestParser::default());
    match decoded {
        Ok(values) => assert_eq!(values, [""]),
        Err(err) => panic!("{}", err),
    }
}
const WORDS_IN_README: usize = 773;
// Counts the README's words through the synchronous `combine::decode!` macro,
// with `PartialRead` injecting quickcheck-chosen short reads (no I/O errors).
#[test]
fn decode_std() {
    quickcheck(
        (|ops: PartialWithErrors<GenNoErrors>| {
            let buf = include_bytes!("../README.md");
            let mut read = PartialRead::new(&buf[..], ops);
            let mut decoder =
                combine::stream::Decoder::<_, combine::stream::PointerOffset<_>>::new();
            let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
            assert_eq!(
                combine::decode!(
                    decoder,
                    read,
                    {
                        // A word is a maximal run of non-whitespace bytes.
                        let word = many1(satisfy(|b| !is_whitespace(b)));
                        sep_end_by(word, skip_many1(satisfy(is_whitespace)))
                            .map(|words: Vec<Vec<u8>>| words.len())
                    },
                    |input, _| combine::easy::Stream::from(input)
                )
                .map_err(From::from)
                .map_err(
                    // Reduce PointerOffset positions to raw integers so the
                    // error type is comparable.
                    |err: combine::easy::Errors<u8, &[u8], combine::stream::PointerOffset<_>>| err
                        .map_position(|p| p.0)
                ),
                Ok(WORDS_IN_README),
            );
        }) as fn(_) -> _,
    )
}
// Same word count through the tokio-0.2 async decode macro, with
// `PartialAsyncRead` injecting short reads and WouldBlock errors.
#[test]
fn decode_tokio_02() {
    quickcheck(
        (|ops: PartialWithErrors<GenWouldBlock>| {
            let buf = include_bytes!("../README.md");
            let runtime = tokio::runtime::Builder::new_current_thread()
                .build()
                .unwrap();
            runtime.block_on(async {
                let mut read = PartialAsyncRead::new(&buf[..], ops);
                let mut decoder =
                    combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
                let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
                assert_eq!(
                    combine::decode_tokio_02!(
                        decoder,
                        read,
                        {
                            let word = many1(satisfy(|b| !is_whitespace(b)));
                            sep_end_by(word, skip_many1(satisfy(is_whitespace)))
                                .map(|words: Vec<Vec<u8>>| words.len())
                        },
                        |input, _| combine::easy::Stream::from(input)
                    )
                    .map_err(From::from)
                    // Own the error's range and translate its position so the
                    // error no longer borrows the decoder's buffer.
                    .map_err(
                        |err: combine::easy::Errors<u8, &[u8], _>| err.map_range(|r| r.to_owned())
                    )
                    .map_err(|err| err.map_position(|p| p.translate_position(&decoder.buffer()))),
                    Ok(WORDS_IN_README),
                );
            })
        }) as fn(_) -> _,
    )
}

// Identical scenario against the tokio-0.3 `AsyncRead` adapter.
#[test]
fn decode_tokio_03() {
    quickcheck(
        (|ops: PartialWithErrors<GenWouldBlock>| {
            let buf = include_bytes!("../README.md");
            let runtime = tokio::runtime::Builder::new_current_thread()
                .build()
                .unwrap();
            runtime.block_on(async {
                let mut read = PartialAsyncRead::new(&buf[..], ops);
                let mut decoder =
                    combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
                let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
                assert_eq!(
                    combine::decode_tokio_03!(
                        decoder,
                        read,
                        {
                            let word = many1(satisfy(|b| !is_whitespace(b)));
                            sep_end_by(word, skip_many1(satisfy(is_whitespace)))
                                .map(|words: Vec<Vec<u8>>| words.len())
                        },
                        |input, _| combine::easy::Stream::from(input)
                    )
                    .map_err(From::from)
                    .map_err(
                        |err: combine::easy::Errors<u8, &[u8], _>| err.map_range(|r| r.to_owned())
                    )
                    .map_err(|err| err.map_position(|p| p.translate_position(&decoder.buffer()))),
                    Ok(WORDS_IN_README),
                );
            })
        }) as fn(_) -> _,
    )
}

// Identical scenario against the current tokio `AsyncRead` adapter.
#[test]
fn decode_tokio() {
    quickcheck(
        (|ops: PartialWithErrors<GenWouldBlock>| {
            let buf = include_bytes!("../README.md");
            let runtime = tokio::runtime::Builder::new_current_thread()
                .build()
                .unwrap();
            runtime.block_on(async {
                let mut read = PartialAsyncRead::new(&buf[..], ops);
                let mut decoder =
                    combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
                let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
                assert_eq!(
                    combine::decode_tokio!(
                        decoder,
                        read,
                        {
                            let word = many1(satisfy(|b| !is_whitespace(b)));
                            sep_end_by(word, skip_many1(satisfy(is_whitespace)))
                                .map(|words: Vec<Vec<u8>>| words.len())
                        },
                        |input, _| combine::easy::Stream::from(input)
                    )
                    .map_err(From::from)
                    .map_err(
                        |err: combine::easy::Errors<u8, &[u8], _>| err.map_range(|r| r.to_owned())
                    )
                    .map_err(|err| err.map_position(|p| p.translate_position(&decoder.buffer()))),
                    Ok(WORDS_IN_README),
                );
            })
        }) as fn(_) -> _,
    )
}
// Same word count through the futures-0.3 decode macro on the async-std
// executor, using the futures-based partial reader.
#[test]
fn decode_async_std() {
    quickcheck(
        (|ops: PartialWithErrors<GenWouldBlock>| {
            let buf = include_bytes!("../README.md");
            async_std::task::block_on(async {
                let mut read = FuturesPartialAsyncRead::new(&buf[..], ops);
                let mut decoder =
                    combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
                let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
                assert_eq!(
                    combine::decode_futures_03!(
                        decoder,
                        read,
                        {
                            let word = many1(satisfy(|b| !is_whitespace(b)));
                            sep_end_by(word, skip_many1(satisfy(is_whitespace)))
                                .map(|words: Vec<Vec<u8>>| words.len())
                        },
                        |input, _| combine::easy::Stream::from(input),
                    )
                    .map_err(From::from)
                    // Only the type annotation matters here; the error itself
                    // is passed through unchanged.
                    .map_err(|err: combine::easy::Errors<u8, &[u8], _>| err),
                    Ok(WORDS_IN_README),
                );
            })
        }) as fn(_) -> _,
    )
}
// Exercises the bufferless `Decoder` in an explicit read loop: alternately
// decode a word and skip the following whitespace until either parser hits
// end of input, then compare the word tally against the known README count.
#[tokio::test]
async fn decode_loop() {
    use tokio::fs::File;

    use combine::{
        decode_tokio, many1, satisfy, skip_many1,
        stream::{buf_reader::BufReader, Decoder},
    };

    let mut read = BufReader::new(File::open("README.md").await.unwrap());
    let mut decoder = Decoder::new_bufferless();
    let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';

    let mut count = 0;
    loop {
        // async block suppresses a warning about duplicate label
        if async {
            decode_tokio!(
                decoder,
                read,
                many1(satisfy(|b| !is_whitespace(b))),
                |input, _position| combine::easy::Stream::from(input),
            )
            .is_err()
        }
        .await
        {
            break;
        }

        count += 1;

        {
            if decode_tokio!(
                decoder,
                read,
                skip_many1(satisfy(is_whitespace)),
                |input, _position| combine::easy::Stream::from(input),
            )
            .is_err()
            {
                break;
            }
        }
    }

    assert_eq!(WORDS_IN_README, count);
}

116
vendor/combine/tests/buffered_stream.rs vendored Normal file
View File

@@ -0,0 +1,116 @@
#![cfg(feature = "std")]
use combine::{
attempt, choice, many, many1,
parser::{
char::{char, digit, spaces, string},
combinator::recognize,
},
sep_by, skip_many1,
stream::{
buffered,
easy::{self, Error, Errors},
position, IteratorStream,
},
Parser, Positioned,
};
/// A `buffered::Stream` with lookahead 1 over a non-cloneable iterator still
/// supports `sep_by` parsing; every digit is shifted up by one on the way in
/// so the parsed numbers differ from the literal input.
#[test]
fn shared_stream_buffer() {
    // The `map` closure makes this iterator impossible to clone.
    let shifted = "10,222,3,44".chars().map(|ch| {
        if ch.is_digit(10) {
            (ch as u8 + 1) as char
        } else {
            ch
        }
    });
    let stream = buffered::Stream::new(position::Stream::new(IteratorStream::new(shifted)), 1);
    let number: &mut dyn Parser<_, Output = _, PartialState = _> =
        &mut many(digit()).map(|digits: String| digits.parse::<i64>().unwrap());
    let parsed = sep_by(number, char(',')).parse(stream).map(|t| t.0);
    assert_eq!(parsed, Ok(vec![21, 333, 4, 55]));
}
// With a lookahead buffer of 2, the `attempt`-wrapped alternatives can
// backtrack far enough to distinguish "apple" / "orange" / "ananas", so the
// whole comma-separated list parses (trailing "blah" is left unconsumed).
#[test]
fn shared_stream_backtrack() {
    let text = "apple,apple,ananas,orangeblah";
    let mut iter = text.chars();
    // Iterator that can't be cloned
    let stream = buffered::Stream::new(position::Stream::new(IteratorStream::new(&mut iter)), 2);

    let value: &mut dyn Parser<_, Output = _, PartialState = _> = &mut choice([
        attempt(string("apple")),
        attempt(string("orange")),
        attempt(string("ananas")),
    ]);
    let mut parser = sep_by(value, char(','));
    let result = parser.parse(stream).map(|t| t.0);
    assert_eq!(result, Ok(vec!["apple", "apple", "ananas", "orange"]));
}
// With a lookahead buffer of only 1, the same alternatives cannot backtrack
// enough, and the buffered stream reports its over-backtracking error.
// NOTE(review): the asserted "Backtracked to far" text (and the fn name's
// "insufficent" spelling) must match the message produced by the library
// itself, typo included — do not "fix" them here without changing the source.
#[test]
fn shared_stream_insufficent_backtrack() {
    let text = "apple,apple,ananas,orangeblah";
    let mut iter = text.chars();
    // Iterator that can't be cloned
    let stream = buffered::Stream::new(
        easy::Stream(position::Stream::new(IteratorStream::new(&mut iter))),
        1,
    );

    let value: &mut dyn Parser<_, Output = _, PartialState = _> = &mut choice([
        attempt(string("apple")),
        attempt(string("orange")),
        attempt(string("ananas")),
    ]);
    let mut parser = sep_by(value, char(','));
    let result: Result<Vec<&str>, _> = parser.parse(stream).map(|t| t.0);
    assert!(result.is_err());
    assert!(
        result
            .as_ref()
            .unwrap_err()
            .errors
            .iter()
            .any(|err| *err == Error::Message("Backtracked to far".into())),
        "{}",
        result.unwrap_err()
    );
}

/// Test which checks that a stream which has ended does not repeat the last token in some cases in
/// which case this test would loop forever
#[test]
fn always_output_end_of_input_after_end_of_input() {
    let text = "10".chars();
    let buffer = buffered::Stream::new(position::Stream::new(IteratorStream::new(text)), 1);
    let int = many1(digit()).map(|s: String| s.parse::<i64>().unwrap());
    let result = many(spaces().with(int)).parse(buffer).map(|t| t.0);
    assert_eq!(result, Ok(vec![10]));
}
// The buffered stream starts at position 0 and advances by the number of
// tokens consumed (two digits here).
#[test]
fn position() {
    let text = "10abc".chars();
    let stream = buffered::Stream::new(position::Stream::new(IteratorStream::new(text)), 3);
    assert_eq!(stream.position(), 0);
    let result = many1::<Vec<_>, _, _>(digit()).parse(stream);
    assert!(result.is_ok());
    assert_eq!(result.unwrap().1.position(), 2);
}

// Regression test for issue #256: `recognize` over a buffered stream with a
// 1-token buffer must fail with the backtracking error (message text comes
// from the library, typo included) rather than misbehave.
#[test]
fn buffered_stream_recognize_issue_256() {
    let mut parser = recognize::<String, _, _>(skip_many1(digit()));
    let input = "12 ";
    assert_eq!(
        parser
            .parse(buffered::Stream::new(easy::Stream(input), 1))
            .map_err(|err| err.map_position(|pos| pos.translate_position(input))),
        Err(Errors {
            position: 2,
            errors: vec![easy::Error::Message("Backtracked to far".into())]
        })
    );
}

673
vendor/combine/tests/parser.rs vendored Normal file
View File

@@ -0,0 +1,673 @@
use combine::{
parser::{
char::{digit, letter},
choice::choice,
combinator::not_followed_by,
range::range,
token::{any, eof, token, Token},
},
Parser,
};
/// `choice` over an empty list of parsers has nothing to try and must fail.
#[test]
fn choice_empty() {
    let mut empty_choice = choice::<_, &mut [Token<&str>]>(&mut []);
    assert!(empty_choice.parse("a").is_err());
}
/// A tuple of parsers runs each element in sequence and yields a tuple of
/// their outputs plus the remaining input.
#[test]
fn tuple() {
    let mut sequence = (digit(), token(','), digit(), token(','), letter());
    let parsed = sequence.parse("1,2,z");
    assert_eq!(parsed, Ok((('1', ',', '2', ',', 'z'), "")));
}
/// Regression test for issue #99: `any().or(eof())` must succeed on empty
/// input via the `eof` branch.
#[test]
fn issue_99() {
    let outcome = any().map(|_| ()).or(eof()).parse("");
    assert!(outcome.is_ok(), "{:?}", outcome);
}
// `not_followed_by` is pure lookahead: on success or failure it leaves the
// input untouched, so the following alternative / remainder still sees it.
#[test]
fn not_followed_by_does_not_consume_any_input() {
    let mut parser = not_followed_by(range("a")).map(|_| "").or(range("a"));

    assert_eq!(parser.parse("a"), Ok(("a", "")));

    let mut parser = range("a").skip(not_followed_by(range("aa")));

    assert_eq!(parser.parse("aa"), Ok(("a", "a")));
    assert!(parser.parse("aaa").is_err());
}
#[cfg(feature = "std")]
mod tests_std {
use super::*;
use combine::easy::{Error, Errors};
use combine::parser::byte::alpha_num;
use combine::parser::byte::bytes;
use combine::parser::byte::bytes_cmp;
use combine::parser::byte::num::be_u32;
use combine::parser::char::char;
use combine::parser::char::{string, string_cmp};
use combine::parser::combinator::no_partial;
use combine::parser::range;
use combine::parser::repeat::{skip_until, take_until};
use combine::stream::position;
use combine::stream::position::SourcePosition;
use combine::{
attempt, count, count_min_max, easy, many, optional, position, sep_by, sep_end_by1,
unexpected, value, EasyParser,
};
// Token type that is Clone but deliberately not Copy, used to check that the
// parsers only require Clone.
#[derive(Clone, PartialEq, Debug)]
struct CloneOnly {
    s: String,
}

#[test]
fn token_clone_but_not_copy() {
    // Verify we can use token() with a StreamSlice with an token type that is Clone but not
    // Copy.
    let input = &[
        CloneOnly { s: "x".to_string() },
        CloneOnly { s: "y".to_string() },
    ][..];
    let result = token(CloneOnly { s: "x".to_string() }).easy_parse(input);
    assert_eq!(
        result,
        Ok((
            CloneOnly { s: "x".to_string() },
            &[CloneOnly { s: "y".to_string() }][..]
        ))
    );
}

// The pair parser commits after the first letter of "bc", so the missing
// second letter is a committed (non-recoverable) error.
#[test]
fn sep_by_committed_error() {
    type TwoLettersList = Vec<(char, char)>;

    let mut parser2 = sep_by((letter(), letter()), token(','));
    let result_err: Result<(TwoLettersList, &str), easy::ParseError<&str>> =
        parser2.easy_parse("a,bc");
    assert!(result_err.is_err());
}
/// The `expected` combinator replaces previously attached `Expected` errors
/// (here "N/A") with the newest one, while `Unexpected` and `Message` errors
/// are retained.
#[test]
fn expected_retain_errors() {
    let mut parser = digit()
        .message("message")
        .expected("N/A")
        .expected("my expected digit");
    assert_eq!(
        parser.easy_parse(position::Stream::new("a")),
        Err(Errors {
            position: SourcePosition::default(),
            errors: vec![
                Error::Unexpected('a'.into()),
                Error::Message("message".into()),
                Error::Expected("my expected digit".into()),
            ],
        })
    );
}

// A tuple failing on its first element reports the error at the start
// position with that element's expectation.
#[test]
fn tuple_parse_error() {
    let mut parser = (digit(), digit());
    let result = parser.easy_parse(position::Stream::new("a"));
    assert_eq!(
        result,
        Err(Errors {
            position: SourcePosition::default(),
            errors: vec![
                Error::Unexpected('a'.into()),
                Error::Expected("digit".into()),
            ],
        })
    );
}
// `message` must append its text to the error list for BOTH empty failures
// (no input consumed, position stays at column 1) and committed failures
// (first char consumed, position at column 2), regardless of whether `map`
// sits before or after it in the chain.
#[test]
fn message_tests() {
    // Ensure message adds to both committed and empty errors, interacting with parse_lazy and
    // parse_stream correctly on either side
    let input = "hi";

    let mut ok = char('h').message("not expected");
    let mut empty0 = char('o').message("expected message");
    let mut empty1 = char('o').message("expected message").map(|x| x);
    let mut empty2 = char('o').map(|x| x).message("expected message");
    let mut committed0 = char('h').with(char('o')).message("expected message");
    let mut committed1 = char('h')
        .with(char('o'))
        .message("expected message")
        .map(|x| x);
    let mut committed2 = char('h')
        .with(char('o'))
        .map(|x| x)
        .message("expected message");

    assert!(ok.easy_parse(position::Stream::new(input)).is_ok());

    let empty_expected = Err(Errors {
        position: SourcePosition { line: 1, column: 1 },
        errors: vec![
            Error::Unexpected('h'.into()),
            Error::Expected('o'.into()),
            Error::Message("expected message".into()),
        ],
    });

    let committed_expected = Err(Errors {
        position: SourcePosition { line: 1, column: 2 },
        errors: vec![
            Error::Unexpected('i'.into()),
            Error::Expected('o'.into()),
            Error::Message("expected message".into()),
        ],
    });

    assert_eq!(
        empty0.easy_parse(position::Stream::new(input)),
        empty_expected
    );
    assert_eq!(
        empty1.easy_parse(position::Stream::new(input)),
        empty_expected
    );
    assert_eq!(
        empty2.easy_parse(position::Stream::new(input)),
        empty_expected
    );

    assert_eq!(
        committed0.easy_parse(position::Stream::new(input)),
        committed_expected
    );
    assert_eq!(
        committed1.easy_parse(position::Stream::new(input)),
        committed_expected
    );
    assert_eq!(
        committed2.easy_parse(position::Stream::new(input)),
        committed_expected
    );
}
// `expected` must only replace the expectation of EMPTY failures; a committed
// failure (first char consumed) keeps the original `Expected('o')` and does
// not receive the replacement text.
#[test]
fn expected_tests() {
    // Ensure `expected` replaces only empty errors, interacting with parse_lazy and
    // parse_stream correctly on either side
    let input = "hi";

    let mut ok = char('h').expected("not expected");
    let mut empty0 = char('o').expected("expected message");
    let mut empty1 = char('o').expected("expected message").map(|x| x);
    let mut empty2 = char('o').map(|x| x).expected("expected message");
    let mut committed0 = char('h').with(char('o')).expected("expected message");
    let mut committed1 = char('h')
        .with(char('o'))
        .expected("expected message")
        .map(|x| x);
    let mut committed2 = char('h')
        .with(char('o'))
        .map(|x| x)
        .expected("expected message");

    assert!(ok.easy_parse(position::Stream::new(input)).is_ok());

    let empty_expected = Err(Errors {
        position: SourcePosition { line: 1, column: 1 },
        errors: vec![
            Error::Unexpected('h'.into()),
            Error::Expected("expected message".into()),
        ],
    });

    let committed_expected = Err(Errors {
        position: SourcePosition { line: 1, column: 2 },
        errors: vec![Error::Unexpected('i'.into()), Error::Expected('o'.into())],
    });

    assert_eq!(
        empty0.easy_parse(position::Stream::new(input)),
        empty_expected
    );
    assert_eq!(
        empty1.easy_parse(position::Stream::new(input)),
        empty_expected
    );
    assert_eq!(
        empty2.easy_parse(position::Stream::new(input)),
        empty_expected
    );

    assert_eq!(
        committed0.easy_parse(position::Stream::new(input)),
        committed_expected
    );
    assert_eq!(
        committed1.easy_parse(position::Stream::new(input)),
        committed_expected
    );
    assert_eq!(
        committed2.easy_parse(position::Stream::new(input)),
        committed_expected
    );
}
// `attempt` must not duplicate error messages: each failure yields exactly one
// `Unexpected` for the stream token plus one for the `unexpected` parser.
#[test]
fn try_tests() {
    // Ensure attempt adds error messages exactly once
    assert_eq!(
        attempt(unexpected("test")).easy_parse(position::Stream::new("hi")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![
                Error::Unexpected('h'.into()),
                Error::Unexpected("test".into()),
            ],
        })
    );
    assert_eq!(
        attempt(char('h').with(unexpected("test"))).easy_parse(position::Stream::new("hi")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 2 },
            errors: vec![
                Error::Unexpected('i'.into()),
                Error::Unexpected("test".into()),
            ],
        })
    );
}

// A sequence reports the error at the element that failed: column 1 when the
// first element fails, column 2 when the second does.
#[test]
fn sequence_error() {
    let mut parser = (char('a'), char('b'), char('c'));

    assert_eq!(
        parser.easy_parse(position::Stream::new("c")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![Error::Unexpected('c'.into()), Error::Expected('a'.into())],
        })
    );

    assert_eq!(
        parser.easy_parse(position::Stream::new("ac")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 2 },
            errors: vec![Error::Unexpected('c'.into()), Error::Expected('b'.into())],
        })
    );
}
// When `optional` succeeds emptily, its inner expectation ('a') must still be
// merged into the error produced by the following parser ('b').
#[test]
fn optional_empty_ok_then_error() {
    let mut parser = (optional(char('a')), char('b'));

    assert_eq!(
        parser.easy_parse(position::Stream::new("c")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![
                Error::Unexpected('c'.into()),
                Error::Expected('a'.into()),
                Error::Expected('b'.into()),
            ],
        })
    );
}

// Same merging must happen when the optional sits inside a nested tuple.
#[test]
fn nested_optional_empty_ok_then_error() {
    let mut parser = ((optional(char('a')), char('b')), char('c'));

    assert_eq!(
        parser.easy_parse(position::Stream::new("c")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![
                Error::Unexpected('c'.into()),
                Error::Expected('a'.into()),
                Error::Expected('b'.into()),
            ],
        })
    );
}

// And when the sequence has already committed (consumed 'b'), the merged
// error is reported at the committed position (column 2).
#[test]
fn committed_then_optional_empty_ok_then_error() {
    let mut parser = (char('b'), optional(char('a')), char('b'));

    assert_eq!(
        parser.easy_parse(position::Stream::new("bc")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 2 },
            errors: vec![
                Error::Unexpected('c'.into()),
                Error::Expected('a'.into()),
                Error::Expected('b'.into()),
            ],
        })
    );
}
// When every branch of a tuple-based `choice` fails emptily, the expectations
// of all branches are accumulated, followed by the single `Unexpected` token.
#[test]
fn sequence_in_choice_parser_empty_err() {
    let mut parser = choice((
        (optional(char('a')), char('1')),
        (optional(char('b')), char('2')).skip(char('d')),
    ));

    assert_eq!(
        parser.easy_parse(position::Stream::new("c")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![
                Error::Expected('a'.into()),
                Error::Expected('1'.into()),
                Error::Expected('b'.into()),
                Error::Expected('2'.into()),
                Error::Unexpected('c'.into()),
            ],
        })
    );
}

// Same accumulation for the array form of `choice`.
#[test]
fn sequence_in_choice_array_parser_empty_err() {
    let mut parser = choice([
        (optional(char('a')), char('1')),
        (optional(char('b')), char('2')),
    ]);
    assert_eq!(
        parser.easy_parse(position::Stream::new("c")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![
                Error::Expected('a'.into()),
                Error::Expected('1'.into()),
                Error::Expected('b'.into()),
                Error::Expected('2'.into()),
                Error::Unexpected('c'.into()),
            ],
        })
    );
}

// As above, but through trait objects, where the second parser delays its
// error reporting via `no_partial` — the merged list must be the same shape.
#[test]
fn sequence_in_choice_array_parser_empty_err_where_first_parser_delay_errors() {
    let mut p1 = char('1');
    let mut p2 = no_partial((optional(char('b')), char('2')).map(|t| t.1));
    let mut parser =
        choice::<_, [&mut dyn Parser<_, Output = _, PartialState = _>; 2]>([&mut p1, &mut p2]);

    assert_eq!(
        parser.easy_parse(position::Stream::new("c")),
        Err(Errors {
            position: SourcePosition { line: 1, column: 1 },
            errors: vec![
                Error::Expected('1'.into()),
                Error::Expected('b'.into()),
                Error::Expected('2'.into()),
                Error::Unexpected('c'.into()),
            ],
        })
    );
}
/// `sep_end_by1` consumes at most one trailing separator; the second ';'
/// must remain in the unparsed remainder.
#[test]
fn sep_end_by1_dont_eat_separator_twice() {
    let mut digits = sep_end_by1(digit(), token(';'));
    let outcome = digits.parse("1;;");
    assert_eq!(outcome, Ok((vec!['1'], ";")));
}
/// A `count_min_max` that fails without consuming input lets `.or` run the
/// fallback, so `value(vec![])` succeeds and leaves the input untouched.
#[test]
fn count_min_max_empty_error() {
    let mut one_a = count_min_max(1, 1, char('a')).or(value(vec![]));
    let outcome = one_a.parse("b");
    assert_eq!(outcome, Ok((vec![], "b")));
}
// Regression tests for issue #168: sequence parsers must reset their partial
// state after a failed `attempt`, otherwise "aa" would be mis-consumed.
#[test]
fn sequence_parser_resets_partial_state_issue_168() {
    assert_eq!(
        take_until::<String, _, _>(attempt((char('a'), char('b')))).parse("aaab"),
        Ok((String::from("aa"), "ab"))
    );
}

// Same scenario through `skip_until`, which is generated by the parser! macro.
#[test]
fn parser_macro_must_impl_parse_mode_issue_168() {
    assert_eq!(
        skip_until(attempt((char('a'), char('b')))).parse("aaab"),
        Ok(((), "ab"))
    );
}

// Same scenario wrapped in `recognize`, which must report the exact range.
#[test]
fn recognize_parser_issue_168() {
    assert_eq!(
        range::recognize(skip_until(attempt((char('a'), char('b'))))).parse("aaab"),
        Ok(("aa", "ab"))
    );
}
// An `optional` wrapping a sequence (position().with(...)) that fails emptily
// must still contribute its delayed expectation to the final error list.
#[test]
fn sequence_in_optional_report_delayed_error() {
    assert_eq!(
        optional(position().with(char('a')))
            .skip(char('}'))
            .easy_parse("b")
            .map_err(|e| e.errors),
        Err(vec![
            Error::Unexpected('b'.into()),
            Error::Expected('a'.into()),
            Error::Expected('}'.into()),
        ]),
    );
}

// Two chained optionals: both delayed expectations ('a' and 'c') appear.
#[test]
fn sequence_in_optional_nested_report_delayed_error() {
    assert_eq!(
        optional(position().with(char('a')))
            .skip(optional(position().with(char('c'))))
            .skip(char('}'))
            .easy_parse("b")
            .map_err(|e| e.errors),
        Err(vec![
            Error::Unexpected('b'.into()),
            Error::Expected('a'.into()),
            Error::Expected('c'.into()),
            Error::Expected('}'.into()),
        ]),
    );
}

// Same, after the outer tuple has already committed by consuming '{'.
#[test]
fn sequence_in_optional_nested_2_report_delayed_error() {
    assert_eq!(
        (
            char('{'),
            optional(position().with(char('a')))
                .skip(optional(position().with(char('c'))))
                .skip(char('}'))
        )
            .easy_parse("{b")
            .map_err(|e| e.errors),
        Err(vec![
            Error::Unexpected('b'.into()),
            Error::Expected('a'.into()),
            Error::Expected('c'.into()),
            Error::Expected('}'.into()),
        ]),
    );
}
// Template: run a repeat combinator ($many) over a sequence parser inside a
// sequencing combinator ($seq) and check that the repeat's delayed
// expectation ('a') is merged with the follower's ('}').
macro_rules! sequence_many_test {
    ($many:expr, $seq:expr) => {
        let mut parser = $seq($many(position().with(char('a'))), char('}'));
        let expected_error = Err(vec![
            Error::Unexpected('b'.into()),
            Error::Expected('a'.into()),
            Error::Expected('}'.into()),
        ]);
        assert_eq!(
            parser.easy_parse("ab").map_err(|e| e.errors),
            expected_error,
        );
    };
}

// Applies the template to many/many1 under skip, with and between.
#[test]
fn sequence_in_many_report_delayed_error() {
    use combine::parser::{repeat, sequence};

    sequence_many_test!(repeat::many::<Vec<_>, _, _>, sequence::skip);
    sequence_many_test!(repeat::many1::<Vec<_>, _, _>, sequence::skip);
    sequence_many_test!(repeat::many::<Vec<_>, _, _>, sequence::with);
    sequence_many_test!(repeat::many1::<Vec<_>, _, _>, sequence::with);
    sequence_many_test!(repeat::many::<Vec<_>, _, _>, |l, x| sequence::between(
        l,
        char('|'),
        x,
    ));
    sequence_many_test!(repeat::many1::<Vec<_>, _, _>, |l, x| sequence::between(
        l,
        char('|'),
        x,
    ));
}

// Same template for separator-based repeats: after "a," the delayed
// expectations are the separator (',') and the follower ('}').
macro_rules! sequence_sep_by_test {
    ($many:expr, $seq:expr) => {
        let mut parser = $seq($many(position().with(char('a')), char(',')), char('}'));
        let expected_error = Err(vec![
            Error::Unexpected('b'.into()),
            Error::Expected(','.into()),
            Error::Expected('}'.into()),
        ]);
        assert_eq!(
            parser.easy_parse("a,ab").map_err(|e| e.errors),
            expected_error,
        );
    };
}

#[test]
fn sequence_in_sep_by_report_delayed_error() {
    use combine::parser::{repeat, sequence};

    sequence_sep_by_test!(repeat::sep_by::<Vec<_>, _, _, _>, sequence::skip);
    sequence_sep_by_test!(repeat::sep_by1::<Vec<_>, _, _, _>, sequence::skip);
    sequence_sep_by_test!(repeat::sep_by::<Vec<_>, _, _, _>, sequence::with);
    sequence_sep_by_test!(repeat::sep_by1::<Vec<_>, _, _, _>, sequence::with);
}
// Errors from a nested `choice` must be composed with the outer choice's
// errors; the inner "cc" branch fails at end of input after consuming 'c'.
#[test]
fn choice_compose_on_error() {
    let ident = |s| attempt(string(s));
    let mut parser = choice((ident("aa").skip(string(";")), choice((ident("cc"),))));

    assert_eq!(
        parser.easy_parse("c").map_err(|err| err.errors),
        Err(vec![
            Error::Unexpected('c'.into()),
            Error::Expected("aa".into()),
            Error::Unexpected("end of input".into()),
            Error::Expected("cc".into()),
        ]),
    );
}

// Regression test for issue #175: all alternative expectations survive when
// the nested choice sits inside `many`.
#[test]
fn choice_compose_issue_175() {
    let ident = |s| attempt(string(s));
    let mut parser = many::<Vec<_>, _, _>(position().and(choice((
        ident("aa").skip(string(";")),
        choice((ident("bb"), ident("cc"))),
    ))))
    .skip(string("."));

    assert_eq!(
        parser.easy_parse("c").map_err(|err| err.errors),
        Err(vec![
            Error::Unexpected('c'.into()),
            Error::Expected("aa".into()),
            Error::Expected("bb".into()),
            Error::Expected("cc".into()),
        ]),
    );
}

// A committed tuple failure reports the second element's expectation.
#[test]
fn test() {
    let mut parser = (digit(), letter());
    assert_eq!(
        parser.easy_parse("11").map_err(|err| err.errors),
        Err(vec![
            Error::Unexpected('1'.into()),
            Error::Expected("letter".into()),
        ]),
    );
}
// Compile-time test: `string`/`bytes` results must not inherit the input's
// lifetime (note the `&'static` bindings) and must unify with `range::take`
// inside `.or`. Nothing is asserted at runtime.
#[test]
fn lifetime_inference() {
    fn _string(source: &str) {
        range::take(1).or(string("a")).parse(source).ok();
        range::take(1)
            .or(string_cmp("a", |x, y| x == y))
            .parse(source)
            .ok();
        let _: &'static str = string("a").parse(source).unwrap().0;
        let _: &'static str = string_cmp("a", |x, y| x == y).parse(source).unwrap().0;
    }
    fn _bytes(source: &[u8]) {
        range::take(1).or(bytes(&[0u8])).parse(source).ok();
        range::take(1)
            .or(bytes_cmp(&[0u8], |x, y| x == y))
            .parse(source)
            .ok();
        let _: &'static [u8] = bytes(&[0u8]).parse(source).unwrap().0;
        let _: &'static [u8] = bytes_cmp(&[0u8], |x, y| x == y).parse(source).unwrap().0;
    }
}
// Nested `then_partial`-driven `count`s parse a binary PUT command: the
// header's message_count selects how many length-prefixed bodies follow.
// Guards against state mix-ups between the nested counts.
#[test]
fn test_nested_count_overflow() {
    let key = || count::<Vec<_>, _, _>(64, alpha_num());
    let value_bytes =
        || be_u32().then_partial(|&mut size| count::<Vec<_>, _, _>(size as usize, any()));
    let value_messages =
        (be_u32(), be_u32()).then_partial(|&mut (_body_size, message_count)| {
            count::<Vec<_>, _, _>(message_count as usize, value_bytes())
        });
    let put = (bytes(b"PUT"), key())
        .map(|(_, key)| key)
        .and(value_messages);
    let parser = || put.map(|(_, messages)| messages);
    let command = &b"PUTkey\x00\x00\x00\x12\x00\x00\x00\x02\x00\x00\x00\x04\xDE\xAD\xBE\xEF\x00\x00\x00\x02\xBE\xEF"[..];
    let result = parser().parse(command).unwrap();
    assert_eq!(2, result.0.len());
}

// Regression test for issue #220: a `not_followed_by(eof())` failure after a
// successful match must produce an error with an empty error list.
#[test]
fn not_followed_by_empty_error_issue_220() {
    let mut parser = string("let").skip(not_followed_by(eof().map(|_| "EOF")));
    assert_eq!(
        parser.easy_parse("let").map_err(|err| err.errors),
        Err(vec![]),
    );
}
}

38
vendor/combine/tests/parser_macro.rs vendored Normal file
View File

@@ -0,0 +1,38 @@
#![allow(clippy::single_match)]
#[macro_use]
extern crate combine;
// Regression test: the `parser!` macro body must accept arbitrary items and
// statements (use, let, nested fn, match) before the final parser expression.
parser! {
    pub fn test[Input]()(Input) -> ()
    where [Input: ::combine::Stream<Token = char>]
    {
        use combine::parser::token::value;

        let _ = ();
        fn _test() { }
        match Some(1) {
            Some(_) => (),
            None => (),
        }
        value(())
    }
}

// Two `parser!` definitions must be able to coexist in the same scope even
// though each expansion contains unnameable (impl-trait-like) types.
parser! {
    pub fn test_that_parsers_with_unnamed_types_can_be_in_same_scope[Input]()(Input) -> ()
    where [Input: ::combine::Stream<Token = char>]
    {
        use combine::parser::token::value;

        value(())
    }
}

// The generated parsers are callable without importing combine's traits here.
#[test]
fn test_that_we_dont_need_imports_for_this_macro_to_work() {
    test::<&str>();
    test_that_parsers_with_unnamed_types_can_be_in_same_scope::<&str>();
}

186
vendor/combine/tests/support/mod.rs vendored Normal file
View File

@@ -0,0 +1,186 @@
#![allow(dead_code)]
use std::{
io,
marker::Unpin,
pin::Pin,
task::{self, Poll},
};
use {futures_03_dep::ready, partial_io::PartialOp};
/// Test-support reader wrapping `inner` that applies a scripted sequence of
/// `PartialOp`s (read-size limits or injected errors) to successive
/// `poll_read` calls; see the `AsyncRead` impls below.
pub struct PartialAsyncRead<R> {
    inner: R,
    // One op is consumed per `poll_read`; once exhausted, reads pass through unlimited.
    ops: Box<dyn Iterator<Item = PartialOp> + Send>,
}
impl<R> PartialAsyncRead<R>
where
R: Unpin,
{
pub fn new<I>(inner: R, ops: I) -> Self
where
I: IntoIterator<Item = PartialOp>,
I::IntoIter: Send + 'static,
{
PartialAsyncRead {
inner,
ops: Box::new(ops.into_iter()),
}
}
}
impl<R> tokio_02_dep::io::AsyncRead for PartialAsyncRead<R>
where
R: tokio_02_dep::io::AsyncRead + Unpin,
{
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
match self.ops.next() {
Some(PartialOp::Limited(n)) => {
let len = std::cmp::min(n, buf.len());
Pin::new(&mut self.inner).poll_read(cx, &mut buf[..len])
}
Some(PartialOp::Err(err)) => {
if err == io::ErrorKind::WouldBlock {
cx.waker().wake_by_ref();
Poll::Pending
} else {
Err(io::Error::new(
err,
"error during read, generated by partial-io",
))
.into()
}
}
Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
}
}
}
impl<R> tokio_03_dep::io::AsyncRead for PartialAsyncRead<R>
where
R: tokio_03_dep::io::AsyncRead + Unpin,
{
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
buf: &mut tokio_03_dep::io::ReadBuf<'_>,
) -> Poll<io::Result<()>> {
match self.ops.next() {
Some(PartialOp::Limited(n)) => {
let len = std::cmp::min(n, buf.remaining());
buf.initialize_unfilled();
let mut sub_buf = buf.take(len);
ready!(Pin::new(&mut self.inner).poll_read(cx, &mut sub_buf))?;
let filled = sub_buf.filled().len();
buf.advance(filled);
Poll::Ready(Ok(()))
}
Some(PartialOp::Err(err)) => {
if err == io::ErrorKind::WouldBlock {
cx.waker().wake_by_ref();
Poll::Pending
} else {
Err(io::Error::new(
err,
"error during read, generated by partial-io",
))
.into()
}
}
Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
}
}
}
impl<R> tokio_dep::io::AsyncRead for PartialAsyncRead<R>
where
R: tokio_dep::io::AsyncRead + Unpin,
{
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
buf: &mut tokio_dep::io::ReadBuf<'_>,
) -> Poll<io::Result<()>> {
match self.ops.next() {
Some(PartialOp::Limited(n)) => {
let len = std::cmp::min(n, buf.remaining());
buf.initialize_unfilled();
let mut sub_buf = buf.take(len);
ready!(Pin::new(&mut self.inner).poll_read(cx, &mut sub_buf))?;
let filled = sub_buf.filled().len();
buf.advance(filled);
Poll::Ready(Ok(()))
}
Some(PartialOp::Err(err)) => {
if err == io::ErrorKind::WouldBlock {
cx.waker().wake_by_ref();
Poll::Pending
} else {
Err(io::Error::new(
err,
"error during read, generated by partial-io",
))
.into()
}
}
Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
}
}
}
/// Counterpart of `PartialAsyncRead` for the `futures` crate's `AsyncRead`:
/// wraps `inner` and applies a scripted sequence of `PartialOp`s to each
/// `poll_read` call.
pub struct FuturesPartialAsyncRead<R> {
    inner: R,
    // One op is consumed per `poll_read`; once exhausted, reads pass through unlimited.
    ops: Box<dyn Iterator<Item = PartialOp> + Send>,
}
impl<R> FuturesPartialAsyncRead<R>
where
    R: crate::futures::io::AsyncRead + Unpin,
{
    /// Wraps `inner` with the given op script. The iterator is boxed so any
    /// sendable `'static` iterator of `PartialOp`s can be used.
    pub fn new<I>(inner: R, ops: I) -> Self
    where
        I: IntoIterator<Item = PartialOp>,
        I::IntoIter: Send + 'static,
    {
        let ops = Box::new(ops.into_iter());
        Self { inner, ops }
    }
}
impl<R> crate::futures::io::AsyncRead for FuturesPartialAsyncRead<R>
where
R: crate::futures::io::AsyncRead + Unpin,
{
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
match self.ops.next() {
Some(PartialOp::Limited(n)) => {
let len = std::cmp::min(n, buf.len());
Pin::new(&mut self.inner).poll_read(cx, &mut buf[..len])
}
Some(PartialOp::Err(err)) => {
if err == io::ErrorKind::WouldBlock {
cx.waker().wake_by_ref();
Poll::Pending
} else {
Err(io::Error::new(
err,
"error during read, generated by partial-io",
))
.into()
}
}
Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
}
}
}