bstr-0.2.17/.cargo_vcs_info.json0000644000000001120000000000100120670ustar { "git": { "sha1": "e38e7a7ca986f9499b30202f49d79e531d14d192" } } bstr-0.2.17/.gitignore000064400000000000000000000000370072674642500127050ustar 00000000000000.*.swp tags target /Cargo.lock bstr-0.2.17/COPYING000064400000000000000000000004040072674642500117460ustar 00000000000000This project is licensed under either of * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) at your option. bstr-0.2.17/Cargo.lock0000644000000070730000000000100100570ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "bstr" version = "0.2.17" dependencies = [ "lazy_static", "memchr", "quickcheck", "regex-automata", "serde", "ucd-parse", "unicode-segmentation", ] [[package]] name = "byteorder" version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "getrandom" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" version = "0.2.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2448f6066e80e3bfc792e9c98bf705b4b0fc6e8ef5b43e5889aff0eaa9c58743" [[package]] name = "memchr" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" [[package]] name = "quickcheck" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ "rand", ] [[package]] name = "rand" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" dependencies = [ "rand_core", ] [[package]] name = "rand_core" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" dependencies = [ "getrandom", ] [[package]] name = "regex" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8963b85b8ce3074fecffde43b4b0dded83ce2f367dc8d363afc56679f3ee820b" dependencies = [ "regex-syntax", ] [[package]] name = "regex-automata" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4" dependencies = [ "byteorder", ] [[package]] name = "regex-syntax" version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8cab7a364d15cde1e505267766a2d3c4e22a843e1a601f0fa7564c0f82ced11c" [[package]] name = "serde" version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a" [[package]] name = "ucd-parse" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5269f8d35df6b8b60758343a6d742ecf09e4bca13faee32af5503aebd1e11b7c" dependencies = [ "lazy_static", "regex", ] [[package]] name = "unicode-segmentation" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0" [[package]] name = "wasi" version = "0.10.2+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" bstr-0.2.17/Cargo.toml0000644000000031740000000000100101000ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "bstr" version = "0.2.17" authors = ["Andrew Gallant "] exclude = ["/.github"] description = "A string type that is not required to be valid UTF-8." homepage = "https://github.com/BurntSushi/bstr" documentation = "https://docs.rs/bstr" readme = "README.md" keywords = ["string", "str", "byte", "bytes", "text"] categories = ["text-processing", "encoding"] license = "MIT OR Apache-2.0" repository = "https://github.com/BurntSushi/bstr" [profile.release] debug = true [lib] bench = false [dependencies.lazy_static] version = "1.2.0" optional = true [dependencies.memchr] version = "2.4.0" default-features = false [dependencies.regex-automata] version = "0.1.5" optional = true default-features = false [dependencies.serde] version = "1.0.85" optional = true default-features = false [dev-dependencies.quickcheck] version = "1" default-features = false [dev-dependencies.ucd-parse] version = "0.1.3" [dev-dependencies.unicode-segmentation] version = "1.2.1" [features] default = ["std", "unicode"] serde1 = ["std", "serde1-nostd", "serde/std"] serde1-nostd = ["serde"] std = ["memchr/std"] unicode = ["lazy_static", "regex-automata"] bstr-0.2.17/Cargo.toml.orig000064400000000000000000000022250072674642500136050ustar 00000000000000[package] name = "bstr" version = "0.2.17" #:version authors = ["Andrew Gallant "] description = "A string type that is not required to be valid UTF-8." 
documentation = "https://docs.rs/bstr" homepage = "https://github.com/BurntSushi/bstr" repository = "https://github.com/BurntSushi/bstr" readme = "README.md" keywords = ["string", "str", "byte", "bytes", "text"] license = "MIT OR Apache-2.0" categories = ["text-processing", "encoding"] exclude = ["/.github"] edition = "2018" [workspace] members = ["bench"] [lib] bench = false [features] default = ["std", "unicode"] std = ["memchr/std"] unicode = ["lazy_static", "regex-automata"] serde1 = ["std", "serde1-nostd", "serde/std"] serde1-nostd = ["serde"] [dependencies] memchr = { version = "2.4.0", default-features = false } lazy_static = { version = "1.2.0", optional = true } regex-automata = { version = "0.1.5", default-features = false, optional = true } serde = { version = "1.0.85", default-features = false, optional = true } [dev-dependencies] quickcheck = { version = "1", default-features = false } ucd-parse = "0.1.3" unicode-segmentation = "1.2.1" [profile.release] debug = true bstr-0.2.17/LICENSE-APACHE000064400000000000000000000251370072674642500126510ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) 
The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. bstr-0.2.17/LICENSE-MIT000064400000000000000000000020760072674642500123560ustar 00000000000000The MIT License (MIT) Copyright (c) 2018-2019 Andrew Gallant Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. bstr-0.2.17/README.md000064400000000000000000000206300072674642500121750ustar 00000000000000bstr ==== This crate provides extension traits for `&[u8]` and `Vec` that enable their use as byte strings, where byte strings are _conventionally_ UTF-8. This differs from the standard library's `String` and `str` types in that they are not required to be valid UTF-8, but may be fully or partially valid UTF-8. [![Build status](https://github.com/BurntSushi/bstr/workflows/ci/badge.svg)](https://github.com/BurntSushi/bstr/actions) [![](https://meritbadge.herokuapp.com/bstr)](https://crates.io/crates/bstr) ### Documentation https://docs.rs/bstr ### When should I use byte strings? See this part of the documentation for more details: https://docs.rs/bstr/0.2.*/bstr/#when-should-i-use-byte-strings. The short story is that byte strings are useful when it is inconvenient or incorrect to require valid UTF-8. ### Usage Add this to your `Cargo.toml`: ```toml [dependencies] bstr = "0.2" ``` ### Examples The following two examples exhibit both the API features of byte strings and the I/O convenience functions provided for reading line-by-line quickly. 
This first example simply shows how to efficiently iterate over lines in stdin, and print out lines containing a particular substring: ```rust use std::error::Error; use std::io::{self, Write}; use bstr::{ByteSlice, io::BufReadExt}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdout = io::BufWriter::new(io::stdout()); stdin.lock().for_byte_line_with_terminator(|line| { if line.contains_str("Dimension") { stdout.write_all(line)?; } Ok(true) })?; Ok(()) } ``` This example shows how to count all of the words (Unicode-aware) in stdin, line-by-line: ```rust use std::error::Error; use std::io; use bstr::{ByteSlice, io::BufReadExt}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut words = 0; stdin.lock().for_byte_line_with_terminator(|line| { words += line.words().count(); Ok(true) })?; println!("{}", words); Ok(()) } ``` This example shows how to convert a stream on stdin to uppercase without performing UTF-8 validation _and_ amortizing allocation. On standard ASCII text, this is quite a bit faster than what you can (easily) do with standard library APIs. (N.B. Any invalid UTF-8 bytes are passed through unchanged.) ```rust use std::error::Error; use std::io::{self, Write}; use bstr::{ByteSlice, io::BufReadExt}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdout = io::BufWriter::new(io::stdout()); let mut upper = vec![]; stdin.lock().for_byte_line_with_terminator(|line| { upper.clear(); line.to_uppercase_into(&mut upper); stdout.write_all(&upper)?; Ok(true) })?; Ok(()) } ``` This example shows how to extract the first 10 visual characters (as grapheme clusters) from each line, where invalid UTF-8 sequences are generally treated as a single character and are passed through correctly: ```rust use std::error::Error; use std::io::{self, Write}; use bstr::{ByteSlice, io::BufReadExt}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdout = io::BufWriter::new(io::stdout()); stdin.lock().for_byte_line_with_terminator(|line| { let end = line .grapheme_indices() .map(|(_, end, _)| end) .take(10) .last() .unwrap_or(line.len()); stdout.write_all(line[..end].trim_end())?; stdout.write_all(b"\n")?; Ok(true) })?; Ok(()) } ``` ### Cargo features This crates comes with a few features that control standard library, serde and Unicode support. * `std` - **Enabled** by default. This provides APIs that require the standard library, such as `Vec`. * `unicode` - **Enabled** by default. This provides APIs that require sizable Unicode data compiled into the binary. This includes, but is not limited to, grapheme/word/sentence segmenters. When this is disabled, basic support such as UTF-8 decoding is still included. * `serde1` - **Disabled** by default. Enables implementations of serde traits for the `BStr` and `BString` types. * `serde1-nostd` - **Disabled** by default. Enables implementations of serde traits for the `BStr` type only, intended for use without the standard library. Generally, you either want `serde1` or `serde1-nostd`, not both. ### Minimum Rust version policy This crate's minimum supported `rustc` version (MSRV) is `1.41.1`. In general, this crate will be conservative with respect to the minimum supported version of Rust. MSRV may be bumped in minor version releases. ### Future work Since this is meant to be a core crate, getting a `1.0` release is a priority. My hope is to move to `1.0` within the next year and commit to its API so that `bstr` can be used as a public dependency. 
A large part of the API surface area was taken from the standard library, so from an API design perspective, a good portion of this crate should be on solid ground already. The main differences from the standard library are in how the various substring search routines work. The standard library provides generic infrastructure for supporting different types of searches with a single method, where as this library prefers to define new methods for each type of search and drop the generic infrastructure. Some _probable_ future considerations for APIs include, but are not limited to: * A convenience layer on top of the `aho-corasick` crate. * Unicode normalization. * More sophisticated support for dealing with Unicode case, perhaps by combining the use cases supported by [`caseless`](https://docs.rs/caseless) and [`unicase`](https://docs.rs/unicase). * Add facilities for dealing with OS strings and file paths, probably via simple conversion routines. Here are some examples that are _probably_ out of scope for this crate: * Regular expressions. * Unicode collation. The exact scope isn't quite clear, but I expect we can iterate on it. In general, as stated below, this crate brings lots of related APIs together into a single crate while simultaneously attempting to keep the total number of dependencies low. Indeed, every dependency of `bstr`, except for `memchr`, is optional. ### High level motivation Strictly speaking, the `bstr` crate provides very little that can't already be achieved with the standard library `Vec`/`&[u8]` APIs and the ecosystem of library crates. For example: * The standard library's [`Utf8Error`](https://doc.rust-lang.org/std/str/struct.Utf8Error.html) can be used for incremental lossy decoding of `&[u8]`. * The [`unicode-segmentation`](https://unicode-rs.github.io/unicode-segmentation/unicode_segmentation/index.html) crate can be used for iterating over graphemes (or words), but is only implemented for `&str` types. One could use `Utf8Error` above to implement grapheme iteration with the same semantics as what `bstr` provides (automatic Unicode replacement codepoint substitution). * The [`twoway`](https://docs.rs/twoway) crate can be used for fast substring searching on `&[u8]`. So why create `bstr`? Part of the point of the `bstr` crate is to provide a uniform API of coupled components instead of relying on users to piece together loosely coupled components from the crate ecosystem. For example, if you wanted to perform a search and replace in a `Vec`, then writing the code to do that with the `twoway` crate is not that difficult, but it's still additional glue code you have to write. This work adds up depending on what you're doing. Consider, for example, trimming and splitting, along with their different variants. In other words, `bstr` is partially a way of pushing back against the micro-crate ecosystem that appears to be evolving. Namely, it is a goal of `bstr` to keep its dependency list lightweight. For example, `serde` is an optional dependency because there is no feasible alternative. In service of this philosophy, currently, the only required dependency of `bstr` is `memchr`. ### License This project is licensed under either of * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0) * MIT license ([LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT) at your option. 
The data in `src/unicode/data/` is licensed under the Unicode License Agreement ([LICENSE-UNICODE](https://www.unicode.org/copyright.html#License)), although this data is only used in tests. bstr-0.2.17/examples/graphemes-std.rs000064400000000000000000000012360072674642500156460ustar 00000000000000use std::error::Error; use std::io::{self, BufRead, Write}; use unicode_segmentation::UnicodeSegmentation; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut stdout = io::BufWriter::new(io::stdout()); let mut line = String::new(); while stdin.read_line(&mut line)? > 0 { let end = line .grapheme_indices(true) .map(|(start, g)| start + g.len()) .take(10) .last() .unwrap_or(line.len()); stdout.write_all(line[..end].trim_end().as_bytes())?; stdout.write_all(b"\n")?; line.clear(); } Ok(()) } bstr-0.2.17/examples/graphemes.rs000064400000000000000000000010700072674642500150520ustar 00000000000000use std::error::Error; use std::io::{self, Write}; use bstr::{io::BufReadExt, ByteSlice}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdout = io::BufWriter::new(io::stdout()); stdin.lock().for_byte_line_with_terminator(|line| { let end = line .grapheme_indices() .map(|(_, end, _)| end) .take(10) .last() .unwrap_or(line.len()); stdout.write_all(line[..end].trim_end())?; stdout.write_all(b"\n")?; Ok(true) })?; Ok(()) } bstr-0.2.17/examples/lines-std.rs000064400000000000000000000006670072674642500150140ustar 00000000000000use std::error::Error; use std::io::{self, BufRead, Write}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut stdout = io::BufWriter::new(io::stdout()); let mut line = String::new(); while stdin.read_line(&mut line)? > 0 { if line.contains("Dimension") { stdout.write_all(line.as_bytes())?; } line.clear(); } Ok(()) } bstr-0.2.17/examples/lines.rs000064400000000000000000000006250072674642500142160ustar 00000000000000use std::error::Error; use std::io::{self, Write}; use bstr::{io::BufReadExt, ByteSlice}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdout = io::BufWriter::new(io::stdout()); stdin.lock().for_byte_line_with_terminator(|line| { if line.contains_str("Dimension") { stdout.write_all(line)?; } Ok(true) })?; Ok(()) } bstr-0.2.17/examples/uppercase-std.rs000064400000000000000000000006200072674642500156560ustar 00000000000000use std::error::Error; use std::io::{self, BufRead, Write}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut stdout = io::BufWriter::new(io::stdout()); let mut line = String::new(); while stdin.read_line(&mut line)? 
> 0 { stdout.write_all(line.to_uppercase().as_bytes())?; line.clear(); } Ok(()) } bstr-0.2.17/examples/uppercase.rs000064400000000000000000000006740072674642500150770ustar 00000000000000use std::error::Error; use std::io::{self, Write}; use bstr::{io::BufReadExt, ByteSlice}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdout = io::BufWriter::new(io::stdout()); let mut upper = vec![]; stdin.lock().for_byte_line_with_terminator(|line| { upper.clear(); line.to_uppercase_into(&mut upper); stdout.write_all(&upper)?; Ok(true) })?; Ok(()) } bstr-0.2.17/examples/words-std.rs000064400000000000000000000006500072674642500150300ustar 00000000000000use std::error::Error; use std::io::{self, BufRead}; use unicode_segmentation::UnicodeSegmentation; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut words = 0; let mut line = String::new(); while stdin.read_line(&mut line)? > 0 { words += line.unicode_words().count(); line.clear(); } println!("{}", words); Ok(()) } bstr-0.2.17/examples/words.rs000064400000000000000000000005140072674642500142370ustar 00000000000000use std::error::Error; use std::io; use bstr::{io::BufReadExt, ByteSlice}; fn main() -> Result<(), Box> { let stdin = io::stdin(); let mut words = 0; stdin.lock().for_byte_line_with_terminator(|line| { words += line.words().count(); Ok(true) })?; println!("{}", words); Ok(()) } bstr-0.2.17/rustfmt.toml000064400000000000000000000000540072674642500133150ustar 00000000000000max_width = 79 use_small_heuristics = "max" bstr-0.2.17/scripts/generate-unicode-data000075500000000000000000000070350072674642500164640ustar 00000000000000#!/bin/sh set -e D="$(dirname "$0")" # Convenience function for checking that a command exists. requires() { cmd="$1" if ! command -v "$cmd" > /dev/null 2>&1; then echo "DEPENDENCY MISSING: $cmd must be installed" >&2 exit 1 fi } # Test if an array ($2) contains a particular element ($1). array_exists() { needle="$1" shift for el in "$@"; do if [ "$el" = "$needle" ]; then return 0 fi done return 1 } graphemes() { regex="$(sh "$D/regex/grapheme.sh")" echo "generating forward grapheme DFA" ucd-generate dfa \ --name GRAPHEME_BREAK_FWD \ --sparse --minimize --anchored --state-size 2 \ src/unicode/fsm/ \ "$regex" echo "generating reverse grapheme DFA" ucd-generate dfa \ --name GRAPHEME_BREAK_REV \ --reverse --longest \ --sparse --minimize --anchored --state-size 2 \ src/unicode/fsm/ \ "$regex" } words() { regex="$(sh "$D/regex/word.sh")" echo "generating forward word DFA (this can take a while)" ucd-generate dfa \ --name WORD_BREAK_FWD \ --sparse --minimize --anchored --state-size 4 \ src/unicode/fsm/ \ "$regex" } sentences() { regex="$(sh "$D/regex/sentence.sh")" echo "generating forward sentence DFA (this can take a while)" ucd-generate dfa \ --name SENTENCE_BREAK_FWD \ --minimize \ --sparse --anchored --state-size 4 \ src/unicode/fsm/ \ "$regex" } regional_indicator() { # For finding all occurrences of region indicators. This is used to handle # regional indicators as a special case for the reverse grapheme iterator # and the reverse word iterator. 
echo "generating regional indicator DFA" ucd-generate dfa \ --name REGIONAL_INDICATOR_REV \ --reverse \ --classes --minimize --anchored --premultiply --state-size 1 \ src/unicode/fsm/ \ "\p{gcb=Regional_Indicator}" } simple_word() { echo "generating forward simple word DFA" ucd-generate dfa \ --name SIMPLE_WORD_FWD \ --sparse --minimize --state-size 2 \ src/unicode/fsm/ \ "\w" } whitespace() { echo "generating forward whitespace DFA" ucd-generate dfa \ --name WHITESPACE_ANCHORED_FWD \ --anchored --classes --premultiply --minimize --state-size 1 \ src/unicode/fsm/ \ "\s+" echo "generating reverse whitespace DFA" ucd-generate dfa \ --name WHITESPACE_ANCHORED_REV \ --reverse \ --anchored --classes --premultiply --minimize --state-size 2 \ src/unicode/fsm/ \ "\s+" } main() { if array_exists "-h" "$@" || array_exists "--help" "$@"; then echo "Usage: $(basename "$0") [--list-commands] [] ..." >&2 exit fi commands=" graphemes sentences words regional-indicator simple-word whitespace " if array_exists "--list-commands" "$@"; then for cmd in $commands; do echo "$cmd" done exit fi # ucd-generate is used to compile regexes into DFAs. requires ucd-generate mkdir -p src/unicode/fsm/ cmds=$* if [ $# -eq 0 ] || array_exists "all" "$@"; then cmds=$commands fi for cmd in $cmds; do if array_exists "$cmd" $commands; then fun="$(echo "$cmd" | sed 's/-/_/g')" eval "$fun" else echo "unrecognized command: $cmd" >&2 fi done } main "$@" bstr-0.2.17/scripts/regex/grapheme.sh000064400000000000000000000014370072674642500156470ustar 00000000000000#!/bin/sh # vim: indentexpr= nosmartindent autoindent # vim: tabstop=2 shiftwidth=2 softtabstop=2 # This regex was manually written, derived from the rules in UAX #29. # Particularly, from Table 1c, which lays out a regex for grapheme clusters. CR="\p{gcb=CR}" LF="\p{gcb=LF}" Control="\p{gcb=Control}" Prepend="\p{gcb=Prepend}" L="\p{gcb=L}" V="\p{gcb=V}" LV="\p{gcb=LV}" LVT="\p{gcb=LVT}" T="\p{gcb=T}" RI="\p{gcb=RI}" Extend="\p{gcb=Extend}" ZWJ="\p{gcb=ZWJ}" SpacingMark="\p{gcb=SpacingMark}" Any="\p{any}" ExtendPict="\p{Extended_Pictographic}" echo "(?x) $CR $LF | $Control | $Prepend* ( ( ($L* ($V+ | $LV $V* | $LVT) $T*) | $L+ | $T+ ) | $RI $RI | $ExtendPict ($Extend* $ZWJ $ExtendPict)* | [^$Control $CR $LF] ) [$Extend $ZWJ $SpacingMark]* | $Any " bstr-0.2.17/scripts/regex/sentence.sh000064400000000000000000000155010072674642500156600ustar 00000000000000#!/bin/sh # vim: indentexpr= nosmartindent autoindent # vim: tabstop=2 shiftwidth=2 softtabstop=2 # This is a regex that I reverse engineered from the sentence boundary chain # rules in UAX #29. Unlike the grapheme regex, which is essentially provided # for us in UAX #29, no such sentence regex exists. # # I looked into how ICU achieves this, since UAX #29 hints that producing # finite state machines for grapheme/sentence/word/line breaking is possible, # but only easy to do for graphemes. ICU does this by implementing their own # DSL for describing the break algorithms in terms of the chaining rules # directly. You can see an example for sentences in # icu4c/source/data/brkitr/rules/sent.txt. ICU then builds a finite state # machine from those rules in a mostly standard way, but implements the # "chaining" aspect of the rules by connecting overlapping end and start # states. For example, given SB7: # # (Upper | Lower) ATerm x Upper # # Then the naive way to convert this into a regex would be something like # # [\p{sb=Upper}\p{sb=Lower}]\p{sb=ATerm}\p{sb=Upper} # # Unfortunately, this is incorrect. Why? 
Well, consider an example like so: # # U.S.A. # # A correct implementation of the sentence breaking algorithm should not insert # any breaks here, exactly in accordance with repeatedly applying rule SB7 as # given above. Our regex fails to do this because it will first match `U.S` # without breaking them---which is correct---but will then start looking for # its next rule beginning with a full stop (in ATerm) and followed by an # uppercase letter (A). This will wind up triggering rule SB11 (without # matching `A`), which inserts a break. # # The reason why this happens is because our initial application of rule SB7 # "consumes" the next uppercase letter (S), which we want to reuse as a prefix # in the next rule application. A natural way to express this would be with # look-around, although it's not clear that works in every case since you # ultimately might want to consume that ending uppercase letter. In any case, # we can't use look-around in our truly regular regexes, so we must fix this. # The approach we take is to explicitly repeat rules when a suffix of a rule # is a prefix of another rule. In the case of SB7, the end of the rule, an # uppercase letter, also happens to match the beginning of the rule. This can # in turn be repeated indefinitely. Thus, our actual translation to a regex is: # # [\p{sb=Upper}\p{sb=Lower}]\p{sb=ATerm}\p{sb=Upper}(\p{sb=ATerm}\p{sb=Upper}* # # It turns out that this is exactly what ICU does, but in their case, they do # it automatically. In our case, we connect the chaining rules manually. It's # tedious. With that said, we do no implement Unicode line breaking with this # approach, which is a far scarier beast. In that case, it would probably be # worth writing the code to do what ICU does. # # In the case of sentence breaks, there aren't *too* many overlaps of this # nature. We list them out exhaustively to make this clear, because it's # essentially impossible to easily observe this in the regex. (It took me a # full day to figure all of this out.) Rules marked with N/A mean that they # specify a break, and this strategy only really applies to stringing together # non-breaks. # # SB1 - N/A # SB2 - N/A # SB3 - None # SB4 - N/A # SB5 - None # SB6 - None # SB7 - End overlaps with beginning of SB7 # SB8 - End overlaps with beginning of SB7 # SB8a - End overlaps with beginning of SB6, SB8, SB8a, SB9, SB10, SB11 # SB9 - None # SB10 - None # SB11 - None # SB998 - N/A # # SB8a is in particular quite tricky to get right without look-ahead, since it # allows ping-ponging between match rules SB8a and SB9-11, where SB9-11 # otherwise indicate that a break has been found. In the regex below, we tackle # this by only permitting part of SB8a to match inside our core non-breaking # repetition. In particular, we only allow the parts of SB8a to match that # permit the non-breaking components to continue. If a part of SB8a matches # that guarantees a pop out to SB9-11, (like `STerm STerm`), then we let it # happen. This still isn't correct because an SContinue might be seen which # would allow moving back into SB998 and thus the non-breaking repetition, so # we handle that case as well. # # Finally, the last complication here is the sprinkling of $Ex* everywhere. # This essentially corresponds to the implementation of SB5 by following # UAX #29's recommendation in S6.2. Essentially, we use it avoid ever breaking # in the middle of a grapheme cluster. 
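# To summarize the chaining trick used throughout the regex below (an
# illustrative note, not consumed by the generator): a chain rule of the
# form `A B x C`, where `C` can also begin another application of the same
# rule, is translated as `A B C (B C)*` rather than the naive `A B C`, so
# that the consumed `C` can serve as the next application's prefix without
# look-around. SB7 above is exactly this shape.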
CR="\p{sb=CR}" LF="\p{sb=LF}" Sep="\p{sb=Sep}" Close="\p{sb=Close}" Sp="\p{sb=Sp}" STerm="\p{sb=STerm}" ATerm="\p{sb=ATerm}" SContinue="\p{sb=SContinue}" Numeric="\p{sb=Numeric}" Upper="\p{sb=Upper}" Lower="\p{sb=Lower}" OLetter="\p{sb=OLetter}" Ex="[\p{sb=Extend}\p{sb=Format}]" ParaSep="[$Sep $CR $LF]" SATerm="[$STerm $ATerm]" LetterSepTerm="[$OLetter $Upper $Lower $ParaSep $SATerm]" echo "(?x) ( # SB6 $ATerm $Ex* $Numeric | # SB7 [$Upper $Lower] $Ex* $ATerm $Ex* $Upper $Ex* # overlap with SB7 ($ATerm $Ex* $Upper $Ex*)* | # SB8 $ATerm $Ex* $Close* $Ex* $Sp* $Ex* ([^$LetterSepTerm] $Ex*)* $Lower $Ex* # overlap with SB7 ($ATerm $Ex* $Upper $Ex*)* | # SB8a $SATerm $Ex* $Close* $Ex* $Sp* $Ex* ( $SContinue | $ATerm $Ex* # Permit repetition of SB8a (($Close $Ex*)* ($Sp $Ex*)* $SATerm)* # In order to continue non-breaking matching, we now must observe # a match with a rule that keeps us in SB6-8a. Otherwise, we've entered # one of SB9-11 and know that a break must follow. ( # overlap with SB6 $Numeric | # overlap with SB8 ($Close $Ex*)* ($Sp $Ex*)* ([^$LetterSepTerm] $Ex*)* $Lower $Ex* # overlap with SB7 ($ATerm $Ex* $Upper $Ex*)* | # overlap with SB8a ($Close $Ex*)* ($Sp $Ex*)* $SContinue ) | $STerm $Ex* # Permit repetition of SB8a (($Close $Ex*)* ($Sp $Ex*)* $SATerm)* # As with ATerm above, in order to continue non-breaking matching, we # must now observe a match with a rule that keeps us out of SB9-11. # For STerm, the only such possibility is to see an SContinue. Anything # else will result in a break. ($Close $Ex*)* ($Sp $Ex*)* $SContinue ) | # SB998 # The logic behind this catch-all is that if we get to this point and # see a Sep, CR, LF, STerm or ATerm, then it has to fall into one of # SB9, SB10 or SB11. In the cases of SB9-11, we always find a break since # SB11 acts as a catch-all to induce a break following a SATerm that isn't # handled by rules SB6-SB8a. [^$ParaSep $SATerm] )* # The following collapses rules SB3, SB4, part of SB8a, SB9, SB10 and SB11. ($SATerm $Ex* ($Close $Ex*)* ($Sp $Ex*)*)* ($CR $LF | $ParaSep)? " bstr-0.2.17/scripts/regex/word.sh000064400000000000000000000066460072674642500150410ustar 00000000000000#!/bin/sh # vim: indentexpr= nosmartindent autoindent # vim: tabstop=2 shiftwidth=2 softtabstop=2 # See the comments in regex/sentence.sh for the general approach to how this # regex was written. # # Writing the regex for this was *hard*. It took me two days of hacking to get # this far, and that was after I had finished the sentence regex, so my brain # was fully cached on this. Unlike the sentence regex, the rules in the regex # below don't correspond as nicely to the rules in UAX #29. In particular, the # UAX #29 rules have a ton of overlap with each other, which requires crazy # stuff in the regex. I'm not even sure the regex below is 100% correct or even # minimal, however, I did compare this with the ICU word segmenter on a few # different corpora, and it produces identical results. (In addition to of # course passing the UCD tests.) # # In general, I consider this approach to be a failure. Firstly, this is # clearly a write-only regex. Secondly, building the minimized DFA for this is # incredibly slow. Thirdly, the DFA is itself very large (~240KB). Fourthly, # reversing this regex (for reverse word iteration) results in a >19MB DFA. # Yes. That's MB. Wat. And it took 5 minutes to build. # # I think we might consider changing our approach to this problem. 
The normal # path I've seen, I think, is to decode codepoints one at a time, and then # thread them through a state machine in the code itself. We could take this # approach, or possibly combine it with a DFA that tells us which Word_Break # value a codepoint has. I'd prefer the latter approach, but it requires adding # RegexSet support to regex-automata. Something that should definitely be done, # but is a fair amount of work. # # Gah. CR="\p{wb=CR}" LF="\p{wb=LF}" Newline="\p{wb=Newline}" ZWJ="\p{wb=ZWJ}" RI="\p{wb=Regional_Indicator}" Katakana="\p{wb=Katakana}" HebrewLet="\p{wb=HebrewLetter}" ALetter="\p{wb=ALetter}" SingleQuote="\p{wb=SingleQuote}" DoubleQuote="\p{wb=DoubleQuote}" MidNumLet="\p{wb=MidNumLet}" MidLetter="\p{wb=MidLetter}" MidNum="\p{wb=MidNum}" Numeric="\p{wb=Numeric}" ExtendNumLet="\p{wb=ExtendNumLet}" WSegSpace="\p{wb=WSegSpace}" Any="\p{any}" Ex="[\p{wb=Extend} \p{wb=Format} $ZWJ]" ExtendPict="\p{Extended_Pictographic}" AHLetter="[$ALetter $HebrewLet]" MidNumLetQ="[$MidNumLet $SingleQuote]" AHLetterRepeat="$AHLetter $Ex* ([$MidLetter $MidNumLetQ] $Ex* $AHLetter $Ex*)*" NumericRepeat="$Numeric $Ex* ([$MidNum $MidNumLetQ] $Ex* $Numeric $Ex*)*" echo "(?x) $CR $LF | [$Newline $CR $LF] | $WSegSpace $WSegSpace+ | ( ([^$Newline $CR $LF]? $Ex* $ZWJ $ExtendPict $Ex*)+ | ($ExtendNumLet $Ex*)* $AHLetter $Ex* ( ( ($NumericRepeat | $ExtendNumLet $Ex*)* | [$MidLetter $MidNumLetQ] $Ex* ) $AHLetter $Ex* )+ ($NumericRepeat | $ExtendNumLet $Ex*)* | ($ExtendNumLet $Ex*)* $AHLetter $Ex* ($NumericRepeat | $ExtendNumLet $Ex*)+ | ($ExtendNumLet $Ex*)* $Numeric $Ex* ( ( ($AHLetterRepeat | $ExtendNumLet $Ex*)* | [$MidNum $MidNumLetQ] $Ex* ) $Numeric $Ex* )+ ($AHLetterRepeat | $ExtendNumLet $Ex*)* | ($ExtendNumLet $Ex*)* $Numeric $Ex* ($AHLetterRepeat | $ExtendNumLet $Ex*)+ | $Katakana $Ex* (($Katakana | $ExtendNumLet) $Ex*)+ | $ExtendNumLet $Ex* (($ExtendNumLet | $AHLetter | $Numeric | $Katakana) $Ex*)+ )+ | $HebrewLet $Ex* $SingleQuote $Ex* | ($HebrewLet $Ex* $DoubleQuote $Ex*)+ $HebrewLet $Ex* | $RI $Ex* $RI $Ex* | $Any $Ex* " bstr-0.2.17/src/ascii.rs000064400000000000000000000274420072674642500131530ustar 00000000000000use core::mem; // The following ~400 lines of code exists for exactly one purpose, which is // to optimize this code: // // byte_slice.iter().position(|&b| b > 0x7F).unwrap_or(byte_slice.len()) // // Yes... Overengineered is a word that comes to mind, but this is effectively // a very similar problem to memchr, and virtually nobody has been able to // resist optimizing the crap out of that (except for perhaps the BSD and MUSL // folks). In particular, this routine makes a very common case (ASCII) very // fast, which seems worth it. We do stop short of adding AVX variants of the // code below in order to retain our sanity and also to avoid needing to deal // with runtime target feature detection. RESIST! // // In order to understand the SIMD version below, it would be good to read this // comment describing how my memchr routine works: // https://github.com/BurntSushi/rust-memchr/blob/b0a29f267f4a7fad8ffcc8fe8377a06498202883/src/x86/sse2.rs#L19-L106 // // The primary difference with memchr is that for ASCII, we can do a bit less // work. In particular, we don't need to detect the presence of a specific // byte, but rather, whether any byte has its most significant bit set. That // means we can effectively skip the _mm_cmpeq_epi8 step and jump straight to // _mm_movemask_epi8. 
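// For reference, here is a minimal scalar sketch of what this module
// computes (the helper name is illustrative and not part of the module;
// the routines below return the same answer, just much faster on
// ASCII-heavy input):
//
//     fn first_non_ascii_byte_naive(slice: &[u8]) -> usize {
//         slice.iter().position(|&b| b > 0x7F).unwrap_or(slice.len())
//     }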
#[cfg(any(test, not(target_arch = "x86_64")))] const USIZE_BYTES: usize = mem::size_of::(); #[cfg(any(test, not(target_arch = "x86_64")))] const FALLBACK_LOOP_SIZE: usize = 2 * USIZE_BYTES; // This is a mask where the most significant bit of each byte in the usize // is set. We test this bit to determine whether a character is ASCII or not. // Namely, a single byte is regarded as an ASCII codepoint if and only if it's // most significant bit is not set. #[cfg(any(test, not(target_arch = "x86_64")))] const ASCII_MASK_U64: u64 = 0x8080808080808080; #[cfg(any(test, not(target_arch = "x86_64")))] const ASCII_MASK: usize = ASCII_MASK_U64 as usize; /// Returns the index of the first non ASCII byte in the given slice. /// /// If slice only contains ASCII bytes, then the length of the slice is /// returned. pub fn first_non_ascii_byte(slice: &[u8]) -> usize { #[cfg(not(target_arch = "x86_64"))] { first_non_ascii_byte_fallback(slice) } #[cfg(target_arch = "x86_64")] { first_non_ascii_byte_sse2(slice) } } #[cfg(any(test, not(target_arch = "x86_64")))] fn first_non_ascii_byte_fallback(slice: &[u8]) -> usize { let align = USIZE_BYTES - 1; let start_ptr = slice.as_ptr(); let end_ptr = slice[slice.len()..].as_ptr(); let mut ptr = start_ptr; unsafe { if slice.len() < USIZE_BYTES { return first_non_ascii_byte_slow(start_ptr, end_ptr, ptr); } let chunk = read_unaligned_usize(ptr); let mask = chunk & ASCII_MASK; if mask != 0 { return first_non_ascii_byte_mask(mask); } ptr = ptr_add(ptr, USIZE_BYTES - (start_ptr as usize & align)); debug_assert!(ptr > start_ptr); debug_assert!(ptr_sub(end_ptr, USIZE_BYTES) >= start_ptr); if slice.len() >= FALLBACK_LOOP_SIZE { while ptr <= ptr_sub(end_ptr, FALLBACK_LOOP_SIZE) { debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES); let a = *(ptr as *const usize); let b = *(ptr_add(ptr, USIZE_BYTES) as *const usize); if (a | b) & ASCII_MASK != 0 { // What a kludge. We wrap the position finding code into // a non-inlineable function, which makes the codegen in // the tight loop above a bit better by avoiding a // couple extra movs. We pay for it by two additional // stores, but only in the case of finding a non-ASCII // byte. 
#[inline(never)] unsafe fn findpos( start_ptr: *const u8, ptr: *const u8, ) -> usize { let a = *(ptr as *const usize); let b = *(ptr_add(ptr, USIZE_BYTES) as *const usize); let mut at = sub(ptr, start_ptr); let maska = a & ASCII_MASK; if maska != 0 { return at + first_non_ascii_byte_mask(maska); } at += USIZE_BYTES; let maskb = b & ASCII_MASK; debug_assert!(maskb != 0); return at + first_non_ascii_byte_mask(maskb); } return findpos(start_ptr, ptr); } ptr = ptr_add(ptr, FALLBACK_LOOP_SIZE); } } first_non_ascii_byte_slow(start_ptr, end_ptr, ptr) } } #[cfg(target_arch = "x86_64")] fn first_non_ascii_byte_sse2(slice: &[u8]) -> usize { use core::arch::x86_64::*; const VECTOR_SIZE: usize = mem::size_of::<__m128i>(); const VECTOR_ALIGN: usize = VECTOR_SIZE - 1; const VECTOR_LOOP_SIZE: usize = 4 * VECTOR_SIZE; let start_ptr = slice.as_ptr(); let end_ptr = slice[slice.len()..].as_ptr(); let mut ptr = start_ptr; unsafe { if slice.len() < VECTOR_SIZE { return first_non_ascii_byte_slow(start_ptr, end_ptr, ptr); } let chunk = _mm_loadu_si128(ptr as *const __m128i); let mask = _mm_movemask_epi8(chunk); if mask != 0 { return mask.trailing_zeros() as usize; } ptr = ptr.add(VECTOR_SIZE - (start_ptr as usize & VECTOR_ALIGN)); debug_assert!(ptr > start_ptr); debug_assert!(end_ptr.sub(VECTOR_SIZE) >= start_ptr); if slice.len() >= VECTOR_LOOP_SIZE { while ptr <= ptr_sub(end_ptr, VECTOR_LOOP_SIZE) { debug_assert_eq!(0, (ptr as usize) % VECTOR_SIZE); let a = _mm_load_si128(ptr as *const __m128i); let b = _mm_load_si128(ptr.add(VECTOR_SIZE) as *const __m128i); let c = _mm_load_si128(ptr.add(2 * VECTOR_SIZE) as *const __m128i); let d = _mm_load_si128(ptr.add(3 * VECTOR_SIZE) as *const __m128i); let or1 = _mm_or_si128(a, b); let or2 = _mm_or_si128(c, d); let or3 = _mm_or_si128(or1, or2); if _mm_movemask_epi8(or3) != 0 { let mut at = sub(ptr, start_ptr); let mask = _mm_movemask_epi8(a); if mask != 0 { return at + mask.trailing_zeros() as usize; } at += VECTOR_SIZE; let mask = _mm_movemask_epi8(b); if mask != 0 { return at + mask.trailing_zeros() as usize; } at += VECTOR_SIZE; let mask = _mm_movemask_epi8(c); if mask != 0 { return at + mask.trailing_zeros() as usize; } at += VECTOR_SIZE; let mask = _mm_movemask_epi8(d); debug_assert!(mask != 0); return at + mask.trailing_zeros() as usize; } ptr = ptr_add(ptr, VECTOR_LOOP_SIZE); } } while ptr <= end_ptr.sub(VECTOR_SIZE) { debug_assert!(sub(end_ptr, ptr) >= VECTOR_SIZE); let chunk = _mm_loadu_si128(ptr as *const __m128i); let mask = _mm_movemask_epi8(chunk); if mask != 0 { return sub(ptr, start_ptr) + mask.trailing_zeros() as usize; } ptr = ptr.add(VECTOR_SIZE); } first_non_ascii_byte_slow(start_ptr, end_ptr, ptr) } } #[inline(always)] unsafe fn first_non_ascii_byte_slow( start_ptr: *const u8, end_ptr: *const u8, mut ptr: *const u8, ) -> usize { debug_assert!(start_ptr <= ptr); debug_assert!(ptr <= end_ptr); while ptr < end_ptr { if *ptr > 0x7F { return sub(ptr, start_ptr); } ptr = ptr.offset(1); } sub(end_ptr, start_ptr) } /// Compute the position of the first ASCII byte in the given mask. /// /// The mask should be computed by `chunk & ASCII_MASK`, where `chunk` is /// 8 contiguous bytes of the slice being checked where *at least* one of those /// bytes is not an ASCII byte. /// /// The position returned is always in the inclusive range [0, 7]. 
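///
/// For example (an illustrative case, assuming a little-endian target so
/// that the `trailing_zeros` branch below applies): if the first two bytes
/// of `chunk` are ASCII and the third byte has its most significant bit
/// set, then the lowest set bit of `mask` is bit 23 and `23 / 8 == 2` is
/// returned. The big-endian branch uses `leading_zeros` symmetrically.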
#[cfg(any(test, not(target_arch = "x86_64")))] fn first_non_ascii_byte_mask(mask: usize) -> usize { #[cfg(target_endian = "little")] { mask.trailing_zeros() as usize / 8 } #[cfg(target_endian = "big")] { mask.leading_zeros() as usize / 8 } } /// Increment the given pointer by the given amount. unsafe fn ptr_add(ptr: *const u8, amt: usize) -> *const u8 { debug_assert!(amt < ::core::isize::MAX as usize); ptr.offset(amt as isize) } /// Decrement the given pointer by the given amount. unsafe fn ptr_sub(ptr: *const u8, amt: usize) -> *const u8 { debug_assert!(amt < ::core::isize::MAX as usize); ptr.offset((amt as isize).wrapping_neg()) } #[cfg(any(test, not(target_arch = "x86_64")))] unsafe fn read_unaligned_usize(ptr: *const u8) -> usize { use core::ptr; let mut n: usize = 0; ptr::copy_nonoverlapping(ptr, &mut n as *mut _ as *mut u8, USIZE_BYTES); n } /// Subtract `b` from `a` and return the difference. `a` should be greater than /// or equal to `b`. fn sub(a: *const u8, b: *const u8) -> usize { debug_assert!(a >= b); (a as usize) - (b as usize) } #[cfg(test)] mod tests { use super::*; // Our testing approach here is to try and exhaustively test every case. // This includes the position at which a non-ASCII byte occurs in addition // to the alignment of the slice that we're searching. #[test] fn positive_fallback_forward() { for i in 0..517 { let s = "a".repeat(i); assert_eq!( i, first_non_ascii_byte_fallback(s.as_bytes()), "i: {:?}, len: {:?}, s: {:?}", i, s.len(), s ); } } #[test] #[cfg(target_arch = "x86_64")] fn positive_sse2_forward() { for i in 0..517 { let b = "a".repeat(i).into_bytes(); assert_eq!(b.len(), first_non_ascii_byte_sse2(&b)); } } #[test] fn negative_fallback_forward() { for i in 0..517 { for align in 0..65 { let mut s = "a".repeat(i); s.push_str("☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃"); let s = s.get(align..).unwrap_or(""); assert_eq!( i.saturating_sub(align), first_non_ascii_byte_fallback(s.as_bytes()), "i: {:?}, align: {:?}, len: {:?}, s: {:?}", i, align, s.len(), s ); } } } #[test] #[cfg(target_arch = "x86_64")] fn negative_sse2_forward() { for i in 0..517 { for align in 0..65 { let mut s = "a".repeat(i); s.push_str("☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃"); let s = s.get(align..).unwrap_or(""); assert_eq!( i.saturating_sub(align), first_non_ascii_byte_sse2(s.as_bytes()), "i: {:?}, align: {:?}, len: {:?}, s: {:?}", i, align, s.len(), s ); } } } } bstr-0.2.17/src/bstr.rs000064400000000000000000000042240072674642500130260ustar 00000000000000use core::mem; /// A wrapper for `&[u8]` that provides convenient string oriented trait impls. /// /// If you need ownership or a growable byte string buffer, then use /// [`BString`](struct.BString.html). /// /// Using a `&BStr` is just like using a `&[u8]`, since `BStr` /// implements `Deref` to `[u8]`. So all methods available on `[u8]` /// are also available on `BStr`. /// /// # Representation /// /// A `&BStr` has the same representation as a `&str`. That is, a `&BStr` is /// a fat pointer which consists of a pointer to some bytes and a length. /// /// # Trait implementations /// /// The `BStr` type has a number of trait implementations, and in particular, /// defines equality and ordinal comparisons between `&BStr`, `&str` and /// `&[u8]` for convenience. /// /// The `Debug` implementation for `BStr` shows its bytes as a normal string. /// For invalid UTF-8, hex escape sequences are used. /// /// The `Display` implementation behaves as if `BStr` were first lossily /// converted to a `str`. 
Invalid UTF-8 bytes are substituted with the Unicode /// replacement codepoint, which looks like this: �. #[derive(Hash)] #[repr(transparent)] pub struct BStr { pub(crate) bytes: [u8], } impl BStr { #[inline] pub(crate) fn new>(bytes: &B) -> &BStr { BStr::from_bytes(bytes.as_ref()) } #[inline] pub(crate) fn new_mut>( bytes: &mut B, ) -> &mut BStr { BStr::from_bytes_mut(bytes.as_mut()) } #[inline] pub(crate) fn from_bytes(slice: &[u8]) -> &BStr { unsafe { mem::transmute(slice) } } #[inline] pub(crate) fn from_bytes_mut(slice: &mut [u8]) -> &mut BStr { unsafe { mem::transmute(slice) } } #[inline] #[cfg(feature = "std")] pub(crate) fn from_boxed_bytes(slice: Box<[u8]>) -> Box { unsafe { Box::from_raw(Box::into_raw(slice) as _) } } #[inline] #[cfg(feature = "std")] pub(crate) fn into_boxed_bytes(slice: Box) -> Box<[u8]> { unsafe { Box::from_raw(Box::into_raw(slice) as _) } } #[inline] pub(crate) fn as_bytes(&self) -> &[u8] { &self.bytes } } bstr-0.2.17/src/bstring.rs000064400000000000000000000033200072674642500135200ustar 00000000000000use crate::bstr::BStr; /// A wrapper for `Vec` that provides convenient string oriented trait /// impls. /// /// A `BString` has ownership over its contents and corresponds to /// a growable or shrinkable buffer. Its borrowed counterpart is a /// [`BStr`](struct.BStr.html), called a byte string slice. /// /// Using a `BString` is just like using a `Vec`, since `BString` /// implements `Deref` to `Vec`. So all methods available on `Vec` /// are also available on `BString`. /// /// # Examples /// /// You can create a new `BString` from a `Vec` via a `From` impl: /// /// ``` /// use bstr::BString; /// /// let s = BString::from("Hello, world!"); /// ``` /// /// # Deref /// /// The `BString` type implements `Deref` and `DerefMut`, where the target /// types are `&Vec` and `&mut Vec`, respectively. `Deref` permits all of the /// methods defined on `Vec` to be implicitly callable on any `BString`. /// /// For more information about how deref works, see the documentation for the /// [`std::ops::Deref`](https://doc.rust-lang.org/std/ops/trait.Deref.html) /// trait. /// /// # Representation /// /// A `BString` has the same representation as a `Vec` and a `String`. /// That is, it is made up of three word sized components: a pointer to a /// region of memory containing the bytes, a length and a capacity. 
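///
/// As a quick illustration of the size equivalence described above (a
/// sketch only; `BString` wraps a `Vec<u8>` directly, so their sizes
/// agree):
///
/// ```
/// use bstr::BString;
///
/// assert_eq!(
///     core::mem::size_of::<BString>(),
///     core::mem::size_of::<Vec<u8>>(),
/// );
/// ```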
#[derive(Clone, Hash)] pub struct BString { pub(crate) bytes: Vec, } impl BString { #[inline] pub(crate) fn as_bytes(&self) -> &[u8] { &self.bytes } #[inline] pub(crate) fn as_bstr(&self) -> &BStr { BStr::new(&self.bytes) } #[inline] pub(crate) fn as_mut_bstr(&mut self) -> &mut BStr { BStr::new_mut(&mut self.bytes) } } bstr-0.2.17/src/byteset/mod.rs000064400000000000000000000070130072674642500143110ustar 00000000000000use memchr::{memchr, memchr2, memchr3, memrchr, memrchr2, memrchr3}; mod scalar; #[inline] fn build_table(byteset: &[u8]) -> [u8; 256] { let mut table = [0u8; 256]; for &b in byteset { table[b as usize] = 1; } table } #[inline] pub(crate) fn find(haystack: &[u8], byteset: &[u8]) -> Option { match byteset.len() { 0 => return None, 1 => memchr(byteset[0], haystack), 2 => memchr2(byteset[0], byteset[1], haystack), 3 => memchr3(byteset[0], byteset[1], byteset[2], haystack), _ => { let table = build_table(byteset); scalar::forward_search_bytes(haystack, |b| table[b as usize] != 0) } } } #[inline] pub(crate) fn rfind(haystack: &[u8], byteset: &[u8]) -> Option { match byteset.len() { 0 => return None, 1 => memrchr(byteset[0], haystack), 2 => memrchr2(byteset[0], byteset[1], haystack), 3 => memrchr3(byteset[0], byteset[1], byteset[2], haystack), _ => { let table = build_table(byteset); scalar::reverse_search_bytes(haystack, |b| table[b as usize] != 0) } } } #[inline] pub(crate) fn find_not(haystack: &[u8], byteset: &[u8]) -> Option { if haystack.is_empty() { return None; } match byteset.len() { 0 => return Some(0), 1 => scalar::inv_memchr(byteset[0], haystack), 2 => scalar::forward_search_bytes(haystack, |b| { b != byteset[0] && b != byteset[1] }), 3 => scalar::forward_search_bytes(haystack, |b| { b != byteset[0] && b != byteset[1] && b != byteset[2] }), _ => { let table = build_table(byteset); scalar::forward_search_bytes(haystack, |b| table[b as usize] == 0) } } } #[inline] pub(crate) fn rfind_not(haystack: &[u8], byteset: &[u8]) -> Option { if haystack.is_empty() { return None; } match byteset.len() { 0 => return Some(haystack.len() - 1), 1 => scalar::inv_memrchr(byteset[0], haystack), 2 => scalar::reverse_search_bytes(haystack, |b| { b != byteset[0] && b != byteset[1] }), 3 => scalar::reverse_search_bytes(haystack, |b| { b != byteset[0] && b != byteset[1] && b != byteset[2] }), _ => { let table = build_table(byteset); scalar::reverse_search_bytes(haystack, |b| table[b as usize] == 0) } } } #[cfg(test)] mod tests { quickcheck::quickcheck! { fn qc_byteset_forward_matches_naive( haystack: Vec, needles: Vec ) -> bool { super::find(&haystack, &needles) == haystack.iter().position(|b| needles.contains(b)) } fn qc_byteset_backwards_matches_naive( haystack: Vec, needles: Vec ) -> bool { super::rfind(&haystack, &needles) == haystack.iter().rposition(|b| needles.contains(b)) } fn qc_byteset_forward_not_matches_naive( haystack: Vec, needles: Vec ) -> bool { super::find_not(&haystack, &needles) == haystack.iter().position(|b| !needles.contains(b)) } fn qc_byteset_backwards_not_matches_naive( haystack: Vec, needles: Vec ) -> bool { super::rfind_not(&haystack, &needles) == haystack.iter().rposition(|b| !needles.contains(b)) } } } bstr-0.2.17/src/byteset/scalar.rs000064400000000000000000000224010072674642500147750ustar 00000000000000// This is adapted from `fallback.rs` from rust-memchr. It's modified to return // the 'inverse' query of memchr, e.g. finding the first byte not in the provided // set. This is simple for the 1-byte case. 
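// For reference, the "inverse" query described above can be written naively,
// one byte at a time. The sketch below is illustrative only (the function
// names are made up and the routines are not used by the crate); the
// optimized routines that follow compute the same answers.
#[allow(dead_code)]
fn inv_memchr_naive(n1: u8, haystack: &[u8]) -> Option<usize> {
    // First position whose byte is *not* `n1`.
    haystack.iter().position(|&b| b != n1)
}
#[allow(dead_code)]
fn inv_memrchr_naive(n1: u8, haystack: &[u8]) -> Option<usize> {
    // Last position whose byte is *not* `n1`, scanning from the right.
    haystack.iter().rposition(|&b| b != n1)
}
// The optimized versions below XOR each word of the haystack against a word
// made of repeated copies of `n1` (see `repeat_byte`); a zero result means
// every byte in that word equals `n1`, so the whole word can be skipped.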
use core::cmp; use core::usize; #[cfg(target_pointer_width = "32")] const USIZE_BYTES: usize = 4; #[cfg(target_pointer_width = "64")] const USIZE_BYTES: usize = 8; // The number of bytes to loop at in one iteration of memchr/memrchr. const LOOP_SIZE: usize = 2 * USIZE_BYTES; /// Repeat the given byte into a word size number. That is, every 8 bits /// is equivalent to the given byte. For example, if `b` is `\x4E` or /// `01001110` in binary, then the returned value on a 32-bit system would be: /// `01001110_01001110_01001110_01001110`. #[inline(always)] fn repeat_byte(b: u8) -> usize { (b as usize) * (usize::MAX / 255) } pub fn inv_memchr(n1: u8, haystack: &[u8]) -> Option { let vn1 = repeat_byte(n1); let confirm = |byte| byte != n1; let loop_size = cmp::min(LOOP_SIZE, haystack.len()); let align = USIZE_BYTES - 1; let start_ptr = haystack.as_ptr(); let end_ptr = haystack[haystack.len()..].as_ptr(); let mut ptr = start_ptr; unsafe { if haystack.len() < USIZE_BYTES { return forward_search(start_ptr, end_ptr, ptr, confirm); } let chunk = read_unaligned_usize(ptr); if (chunk ^ vn1) != 0 { return forward_search(start_ptr, end_ptr, ptr, confirm); } ptr = ptr.add(USIZE_BYTES - (start_ptr as usize & align)); debug_assert!(ptr > start_ptr); debug_assert!(end_ptr.sub(USIZE_BYTES) >= start_ptr); while loop_size == LOOP_SIZE && ptr <= end_ptr.sub(loop_size) { debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES); let a = *(ptr as *const usize); let b = *(ptr.add(USIZE_BYTES) as *const usize); let eqa = (a ^ vn1) != 0; let eqb = (b ^ vn1) != 0; if eqa || eqb { break; } ptr = ptr.add(LOOP_SIZE); } forward_search(start_ptr, end_ptr, ptr, confirm) } } /// Return the last index not matching the byte `x` in `text`. pub fn inv_memrchr(n1: u8, haystack: &[u8]) -> Option { let vn1 = repeat_byte(n1); let confirm = |byte| byte != n1; let loop_size = cmp::min(LOOP_SIZE, haystack.len()); let align = USIZE_BYTES - 1; let start_ptr = haystack.as_ptr(); let end_ptr = haystack[haystack.len()..].as_ptr(); let mut ptr = end_ptr; unsafe { if haystack.len() < USIZE_BYTES { return reverse_search(start_ptr, end_ptr, ptr, confirm); } let chunk = read_unaligned_usize(ptr.sub(USIZE_BYTES)); if (chunk ^ vn1) != 0 { return reverse_search(start_ptr, end_ptr, ptr, confirm); } ptr = (end_ptr as usize & !align) as *const u8; debug_assert!(start_ptr <= ptr && ptr <= end_ptr); while loop_size == LOOP_SIZE && ptr >= start_ptr.add(loop_size) { debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES); let a = *(ptr.sub(2 * USIZE_BYTES) as *const usize); let b = *(ptr.sub(1 * USIZE_BYTES) as *const usize); let eqa = (a ^ vn1) != 0; let eqb = (b ^ vn1) != 0; if eqa || eqb { break; } ptr = ptr.sub(loop_size); } reverse_search(start_ptr, end_ptr, ptr, confirm) } } #[inline(always)] unsafe fn forward_search bool>( start_ptr: *const u8, end_ptr: *const u8, mut ptr: *const u8, confirm: F, ) -> Option { debug_assert!(start_ptr <= ptr); debug_assert!(ptr <= end_ptr); while ptr < end_ptr { if confirm(*ptr) { return Some(sub(ptr, start_ptr)); } ptr = ptr.offset(1); } None } #[inline(always)] unsafe fn reverse_search bool>( start_ptr: *const u8, end_ptr: *const u8, mut ptr: *const u8, confirm: F, ) -> Option { debug_assert!(start_ptr <= ptr); debug_assert!(ptr <= end_ptr); while ptr > start_ptr { ptr = ptr.offset(-1); if confirm(*ptr) { return Some(sub(ptr, start_ptr)); } } None } unsafe fn read_unaligned_usize(ptr: *const u8) -> usize { (ptr as *const usize).read_unaligned() } /// Subtract `b` from `a` and return the difference. 
`a` should be greater than /// or equal to `b`. fn sub(a: *const u8, b: *const u8) -> usize { debug_assert!(a >= b); (a as usize) - (b as usize) } /// Safe wrapper around `forward_search` #[inline] pub(crate) fn forward_search_bytes bool>( s: &[u8], confirm: F, ) -> Option { unsafe { let start = s.as_ptr(); let end = start.add(s.len()); forward_search(start, end, start, confirm) } } /// Safe wrapper around `reverse_search` #[inline] pub(crate) fn reverse_search_bytes bool>( s: &[u8], confirm: F, ) -> Option { unsafe { let start = s.as_ptr(); let end = start.add(s.len()); reverse_search(start, end, end, confirm) } } #[cfg(test)] mod tests { use super::{inv_memchr, inv_memrchr}; // search string, search byte, inv_memchr result, inv_memrchr result. // these are expanded into a much larger set of tests in build_tests const TESTS: &[(&[u8], u8, usize, usize)] = &[ (b"z", b'a', 0, 0), (b"zz", b'a', 0, 1), (b"aza", b'a', 1, 1), (b"zaz", b'a', 0, 2), (b"zza", b'a', 0, 1), (b"zaa", b'a', 0, 0), (b"zzz", b'a', 0, 2), ]; type TestCase = (Vec, u8, Option<(usize, usize)>); fn build_tests() -> Vec { let mut result = vec![]; for &(search, byte, fwd_pos, rev_pos) in TESTS { result.push((search.to_vec(), byte, Some((fwd_pos, rev_pos)))); for i in 1..515 { // add a bunch of copies of the search byte to the end. let mut suffixed: Vec = search.into(); suffixed.extend(std::iter::repeat(byte).take(i)); result.push((suffixed, byte, Some((fwd_pos, rev_pos)))); // add a bunch of copies of the search byte to the start. let mut prefixed: Vec = std::iter::repeat(byte).take(i).collect(); prefixed.extend(search); result.push(( prefixed, byte, Some((fwd_pos + i, rev_pos + i)), )); // add a bunch of copies of the search byte to both ends. let mut surrounded: Vec = std::iter::repeat(byte).take(i).collect(); surrounded.extend(search); surrounded.extend(std::iter::repeat(byte).take(i)); result.push(( surrounded, byte, Some((fwd_pos + i, rev_pos + i)), )); } } // build non-matching tests for several sizes for i in 0..515 { result.push(( std::iter::repeat(b'\0').take(i).collect(), b'\0', None, )); } result } #[test] fn test_inv_memchr() { use crate::{ByteSlice, B}; for (search, byte, matching) in build_tests() { assert_eq!( inv_memchr(byte, &search), matching.map(|m| m.0), "inv_memchr when searching for {:?} in {:?}", byte as char, // better printing B(&search).as_bstr(), ); assert_eq!( inv_memrchr(byte, &search), matching.map(|m| m.1), "inv_memrchr when searching for {:?} in {:?}", byte as char, // better printing B(&search).as_bstr(), ); // Test a rather large number off offsets for potential alignment issues for offset in 1..130 { if offset >= search.len() { break; } // If this would cause us to shift the results off the end, skip // it so that we don't have to recompute them. 
if let Some((f, r)) = matching { if offset > f || offset > r { break; } } let realigned = &search[offset..]; let forward_pos = matching.map(|m| m.0 - offset); let reverse_pos = matching.map(|m| m.1 - offset); assert_eq!( inv_memchr(byte, &realigned), forward_pos, "inv_memchr when searching (realigned by {}) for {:?} in {:?}", offset, byte as char, realigned.as_bstr(), ); assert_eq!( inv_memrchr(byte, &realigned), reverse_pos, "inv_memrchr when searching (realigned by {}) for {:?} in {:?}", offset, byte as char, realigned.as_bstr(), ); } } } } bstr-0.2.17/src/ext_slice.rs000064400000000000000000003447060072674642500140470ustar 00000000000000#[cfg(feature = "std")] use std::borrow::Cow; #[cfg(feature = "std")] use std::ffi::OsStr; #[cfg(feature = "std")] use std::path::Path; use core::{iter, ops, ptr, slice, str}; use memchr::{memchr, memmem, memrchr}; use crate::ascii; use crate::bstr::BStr; use crate::byteset; #[cfg(feature = "std")] use crate::ext_vec::ByteVec; #[cfg(feature = "unicode")] use crate::unicode::{ whitespace_len_fwd, whitespace_len_rev, GraphemeIndices, Graphemes, SentenceIndices, Sentences, WordIndices, Words, WordsWithBreakIndices, WordsWithBreaks, }; use crate::utf8::{self, CharIndices, Chars, Utf8Chunks, Utf8Error}; /// A short-hand constructor for building a `&[u8]`. /// /// This idiosyncratic constructor is useful for concisely building byte string /// slices. Its primary utility is in conveniently writing byte string literals /// in a uniform way. For example, consider this code that does not compile: /// /// ```ignore /// let strs = vec![b"a", b"xy"]; /// ``` /// /// The above code doesn't compile because the type of the byte string literal /// `b"a"` is `&'static [u8; 1]`, and the type of `b"xy"` is /// `&'static [u8; 2]`. Since their types aren't the same, they can't be stored /// in the same `Vec`. (This is dissimilar from normal Unicode string slices, /// where both `"a"` and `"xy"` have the same type of `&'static str`.) /// /// One way of getting the above code to compile is to convert byte strings to /// slices. You might try this: /// /// ```ignore /// let strs = vec![&b"a", &b"xy"]; /// ``` /// /// But this just creates values with type `& &'static [u8; 1]` and /// `& &'static [u8; 2]`. Instead, you need to force the issue like so: /// /// ``` /// let strs = vec![&b"a"[..], &b"xy"[..]]; /// // or /// let strs = vec![b"a".as_ref(), b"xy".as_ref()]; /// ``` /// /// But neither of these are particularly convenient to type, especially when /// it's something as common as a string literal. Thus, this constructor /// permits writing the following instead: /// /// ``` /// use bstr::B; /// /// let strs = vec![B("a"), B(b"xy")]; /// ``` /// /// Notice that this also lets you mix and match both string literals and byte /// string literals. This can be quite convenient! #[allow(non_snake_case)] #[inline] pub fn B<'a, B: ?Sized + AsRef<[u8]>>(bytes: &'a B) -> &'a [u8] { bytes.as_ref() } impl ByteSlice for [u8] { #[inline] fn as_bytes(&self) -> &[u8] { self } #[inline] fn as_bytes_mut(&mut self) -> &mut [u8] { self } } /// Ensure that callers cannot implement `ByteSlice` by making an /// umplementable trait its super trait. pub trait Sealed {} impl Sealed for [u8] {} /// A trait that extends `&[u8]` with string oriented methods. pub trait ByteSlice: Sealed { /// A method for accessing the raw bytes of this type. This is always a /// no-op and callers shouldn't care about it. This only exists for making /// the extension trait work. 
#[doc(hidden)] fn as_bytes(&self) -> &[u8]; /// A method for accessing the raw bytes of this type, mutably. This is /// always a no-op and callers shouldn't care about it. This only exists /// for making the extension trait work. #[doc(hidden)] fn as_bytes_mut(&mut self) -> &mut [u8]; /// Return this byte slice as a `&BStr`. /// /// Use `&BStr` is useful because of its `fmt::Debug` representation /// and various other trait implementations (such as `PartialEq` and /// `PartialOrd`). In particular, the `Debug` implementation for `BStr` /// shows its bytes as a normal string. For invalid UTF-8, hex escape /// sequences are used. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// println!("{:?}", b"foo\xFFbar".as_bstr()); /// ``` #[inline] fn as_bstr(&self) -> &BStr { BStr::new(self.as_bytes()) } /// Return this byte slice as a `&mut BStr`. /// /// Use `&mut BStr` is useful because of its `fmt::Debug` representation /// and various other trait implementations (such as `PartialEq` and /// `PartialOrd`). In particular, the `Debug` implementation for `BStr` /// shows its bytes as a normal string. For invalid UTF-8, hex escape /// sequences are used. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let mut bytes = *b"foo\xFFbar"; /// println!("{:?}", &mut bytes.as_bstr_mut()); /// ``` #[inline] fn as_bstr_mut(&mut self) -> &mut BStr { BStr::new_mut(self.as_bytes_mut()) } /// Create an immutable byte string from an OS string slice. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns `None` if the given OS string is not valid UTF-8. (For /// example, on Windows, file paths are allowed to be a sequence of /// arbitrary 16-bit integers. Not all such sequences can be transcoded to /// valid UTF-8.) /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::ffi::OsStr; /// /// use bstr::{B, ByteSlice}; /// /// let os_str = OsStr::new("foo"); /// let bs = <[u8]>::from_os_str(os_str).expect("should be valid UTF-8"); /// assert_eq!(bs, B("foo")); /// ``` #[cfg(feature = "std")] #[inline] fn from_os_str(os_str: &OsStr) -> Option<&[u8]> { #[cfg(unix)] #[inline] fn imp(os_str: &OsStr) -> Option<&[u8]> { use std::os::unix::ffi::OsStrExt; Some(os_str.as_bytes()) } #[cfg(not(unix))] #[inline] fn imp(os_str: &OsStr) -> Option<&[u8]> { os_str.to_str().map(|s| s.as_bytes()) } imp(os_str) } /// Create an immutable byte string from a file path. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns `None` if the given path is not valid UTF-8. (For example, /// on Windows, file paths are allowed to be a sequence of arbitrary 16-bit /// integers. Not all such sequences can be transcoded to valid UTF-8.) /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::path::Path; /// /// use bstr::{B, ByteSlice}; /// /// let path = Path::new("foo"); /// let bs = <[u8]>::from_path(path).expect("should be valid UTF-8"); /// assert_eq!(bs, B("foo")); /// ``` #[cfg(feature = "std")] #[inline] fn from_path(path: &Path) -> Option<&[u8]> { Self::from_os_str(path.as_os_str()) } /// Safely convert this byte string into a `&str` if it's valid UTF-8. /// /// If this byte string is not valid UTF-8, then an error is returned. The /// error returned indicates the first invalid byte found and the length /// of the error. 
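///
/// As a rough sketch of working with that error, its `valid_up_to` accessor
/// (also used in the example below) and `error_len` can be used to split off
/// the longest valid prefix:
///
/// ```
/// use bstr::ByteSlice;
///
/// let bytes = b"abc\xFFxyz";
/// match bytes.to_str() {
///     Ok(_) => unreachable!("the input contains invalid UTF-8"),
///     Err(err) => {
///         // Everything before `valid_up_to` is guaranteed to be valid UTF-8.
///         let (valid, _rest) = bytes.split_at(err.valid_up_to());
///         assert_eq!("abc", valid.to_str().unwrap());
///         // `error_len` reports the length of the invalid sequence, if known.
///         assert_eq!(Some(1), err.error_len());
///     }
/// }
/// ```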
/// /// In cases where a lossy conversion to `&str` is acceptable, then use one /// of the [`to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) or /// [`to_str_lossy_into`](trait.ByteSlice.html#method.to_str_lossy_into) /// methods. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice, ByteVec}; /// /// # fn example() -> Result<(), bstr::Utf8Error> { /// let s = B("☃βツ").to_str()?; /// assert_eq!("☃βツ", s); /// /// let mut bstring = >::from("☃βツ"); /// bstring.push(b'\xFF'); /// let err = bstring.to_str().unwrap_err(); /// assert_eq!(8, err.valid_up_to()); /// # Ok(()) }; example().unwrap() /// ``` #[inline] fn to_str(&self) -> Result<&str, Utf8Error> { utf8::validate(self.as_bytes()).map(|_| { // SAFETY: This is safe because of the guarantees provided by // utf8::validate. unsafe { str::from_utf8_unchecked(self.as_bytes()) } }) } /// Unsafely convert this byte string into a `&str`, without checking for /// valid UTF-8. /// /// # Safety /// /// Callers *must* ensure that this byte string is valid UTF-8 before /// calling this method. Converting a byte string into a `&str` that is /// not valid UTF-8 is considered undefined behavior. /// /// This routine is useful in performance sensitive contexts where the /// UTF-8 validity of the byte string is already known and it is /// undesirable to pay the cost of an additional UTF-8 validation check /// that [`to_str`](trait.ByteSlice.html#method.to_str) performs. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// // SAFETY: This is safe because string literals are guaranteed to be /// // valid UTF-8 by the Rust compiler. /// let s = unsafe { B("☃βツ").to_str_unchecked() }; /// assert_eq!("☃βツ", s); /// ``` #[inline] unsafe fn to_str_unchecked(&self) -> &str { str::from_utf8_unchecked(self.as_bytes()) } /// Convert this byte string to a valid UTF-8 string by replacing invalid /// UTF-8 bytes with the Unicode replacement codepoint (`U+FFFD`). /// /// If the byte string is already valid UTF-8, then no copying or /// allocation is performed and a borrrowed string slice is returned. If /// the byte string is not valid UTF-8, then an owned string buffer is /// returned with invalid bytes replaced by the replacement codepoint. /// /// This method uses the "substitution of maximal subparts" (Unicode /// Standard, Chapter 3, Section 9) strategy for inserting the replacement /// codepoint. Specifically, a replacement codepoint is inserted whenever a /// byte is found that cannot possibly lead to a valid code unit sequence. /// If there were previous bytes that represented a prefix of a well-formed /// code unit sequence, then all of those bytes are substituted with a /// single replacement codepoint. The "substitution of maximal subparts" /// strategy is the same strategy used by /// [W3C's Encoding standard](https://www.w3.org/TR/encoding/). /// For a more precise description of the maximal subpart strategy, see /// the Unicode Standard, Chapter 3, Section 9. See also /// [Public Review Issue #121](http://www.unicode.org/review/pr-121.html). /// /// N.B. Rust's standard library also appears to use the same strategy, /// but it does not appear to be an API guarantee. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::borrow::Cow; /// /// use bstr::ByteSlice; /// /// let mut bstring = >::from("☃βツ"); /// assert_eq!(Cow::Borrowed("☃βツ"), bstring.to_str_lossy()); /// /// // Add a byte that makes the sequence invalid. 
/// bstring.push(b'\xFF'); /// assert_eq!(Cow::Borrowed("☃βツ\u{FFFD}"), bstring.to_str_lossy()); /// ``` /// /// This demonstrates the "maximal subpart" substitution logic. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// // \x61 is the ASCII codepoint for 'a'. /// // \xF1\x80\x80 is a valid 3-byte code unit prefix. /// // \xE1\x80 is a valid 2-byte code unit prefix. /// // \xC2 is a valid 1-byte code unit prefix. /// // \x62 is the ASCII codepoint for 'b'. /// // /// // In sum, each of the prefixes is replaced by a single replacement /// // codepoint since none of the prefixes are properly completed. This /// // is in contrast to other strategies that might insert a replacement /// // codepoint for every single byte. /// let bs = B(b"\x61\xF1\x80\x80\xE1\x80\xC2\x62"); /// assert_eq!("a\u{FFFD}\u{FFFD}\u{FFFD}b", bs.to_str_lossy()); /// ``` #[cfg(feature = "std")] #[inline] fn to_str_lossy(&self) -> Cow<'_, str> { match utf8::validate(self.as_bytes()) { Ok(()) => { // SAFETY: This is safe because of the guarantees provided by // utf8::validate. unsafe { Cow::Borrowed(str::from_utf8_unchecked(self.as_bytes())) } } Err(err) => { let mut lossy = String::with_capacity(self.as_bytes().len()); let (valid, after) = self.as_bytes().split_at(err.valid_up_to()); // SAFETY: This is safe because utf8::validate guarantees // that all of `valid` is valid UTF-8. lossy.push_str(unsafe { str::from_utf8_unchecked(valid) }); lossy.push_str("\u{FFFD}"); if let Some(len) = err.error_len() { after[len..].to_str_lossy_into(&mut lossy); } Cow::Owned(lossy) } } } /// Copy the contents of this byte string into the given owned string /// buffer, while replacing invalid UTF-8 code unit sequences with the /// Unicode replacement codepoint (`U+FFFD`). /// /// This method uses the same "substitution of maximal subparts" strategy /// for inserting the replacement codepoint as the /// [`to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) method. /// /// This routine is useful for amortizing allocation. However, unlike /// `to_str_lossy`, this routine will _always_ copy the contents of this /// byte string into the destination buffer, even if this byte string is /// valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::borrow::Cow; /// /// use bstr::ByteSlice; /// /// let mut bstring = >::from("☃βツ"); /// // Add a byte that makes the sequence invalid. /// bstring.push(b'\xFF'); /// /// let mut dest = String::new(); /// bstring.to_str_lossy_into(&mut dest); /// assert_eq!("☃βツ\u{FFFD}", dest); /// ``` #[cfg(feature = "std")] #[inline] fn to_str_lossy_into(&self, dest: &mut String) { let mut bytes = self.as_bytes(); dest.reserve(bytes.len()); loop { match utf8::validate(bytes) { Ok(()) => { // SAFETY: This is safe because utf8::validate guarantees // that all of `bytes` is valid UTF-8. dest.push_str(unsafe { str::from_utf8_unchecked(bytes) }); break; } Err(err) => { let (valid, after) = bytes.split_at(err.valid_up_to()); // SAFETY: This is safe because utf8::validate guarantees // that all of `valid` is valid UTF-8. dest.push_str(unsafe { str::from_utf8_unchecked(valid) }); dest.push_str("\u{FFFD}"); match err.error_len() { None => break, Some(len) => bytes = &after[len..], } } } } } /// Create an OS string slice from this byte string. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns a UTF-8 decoding error if this byte string is not valid /// UTF-8. (For example, on Windows, file paths are allowed to be a /// sequence of arbitrary 16-bit integers. 
There is no obvious mapping from /// an arbitrary sequence of 8-bit integers to an arbitrary sequence of /// 16-bit integers.) /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let os_str = b"foo".to_os_str().expect("should be valid UTF-8"); /// assert_eq!(os_str, "foo"); /// ``` #[cfg(feature = "std")] #[inline] fn to_os_str(&self) -> Result<&OsStr, Utf8Error> { #[cfg(unix)] #[inline] fn imp(bytes: &[u8]) -> Result<&OsStr, Utf8Error> { use std::os::unix::ffi::OsStrExt; Ok(OsStr::from_bytes(bytes)) } #[cfg(not(unix))] #[inline] fn imp(bytes: &[u8]) -> Result<&OsStr, Utf8Error> { bytes.to_str().map(OsStr::new) } imp(self.as_bytes()) } /// Lossily create an OS string slice from this byte string. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this will perform a UTF-8 check and lossily convert this byte string /// into valid UTF-8 using the Unicode replacement codepoint. /// /// Note that this can prevent the correct roundtripping of file paths on /// non-Unix systems such as Windows, where file paths are an arbitrary /// sequence of 16-bit integers. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let os_str = b"foo\xFFbar".to_os_str_lossy(); /// assert_eq!(os_str.to_string_lossy(), "foo\u{FFFD}bar"); /// ``` #[cfg(feature = "std")] #[inline] fn to_os_str_lossy(&self) -> Cow<'_, OsStr> { #[cfg(unix)] #[inline] fn imp(bytes: &[u8]) -> Cow<'_, OsStr> { use std::os::unix::ffi::OsStrExt; Cow::Borrowed(OsStr::from_bytes(bytes)) } #[cfg(not(unix))] #[inline] fn imp(bytes: &[u8]) -> Cow { use std::ffi::OsString; match bytes.to_str_lossy() { Cow::Borrowed(x) => Cow::Borrowed(OsStr::new(x)), Cow::Owned(x) => Cow::Owned(OsString::from(x)), } } imp(self.as_bytes()) } /// Create a path slice from this byte string. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns a UTF-8 decoding error if this byte string is not valid /// UTF-8. (For example, on Windows, file paths are allowed to be a /// sequence of arbitrary 16-bit integers. There is no obvious mapping from /// an arbitrary sequence of 8-bit integers to an arbitrary sequence of /// 16-bit integers.) /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let path = b"foo".to_path().expect("should be valid UTF-8"); /// assert_eq!(path.as_os_str(), "foo"); /// ``` #[cfg(feature = "std")] #[inline] fn to_path(&self) -> Result<&Path, Utf8Error> { self.to_os_str().map(Path::new) } /// Lossily create a path slice from this byte string. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this will perform a UTF-8 check and lossily convert this byte string /// into valid UTF-8 using the Unicode replacement codepoint. /// /// Note that this can prevent the correct roundtripping of file paths on /// non-Unix systems such as Windows, where file paths are an arbitrary /// sequence of 16-bit integers. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"foo\xFFbar"; /// let path = bs.to_path_lossy(); /// assert_eq!(path.to_string_lossy(), "foo\u{FFFD}bar"); /// ``` #[cfg(feature = "std")] #[inline] fn to_path_lossy(&self) -> Cow<'_, Path> { use std::path::PathBuf; match self.to_os_str_lossy() { Cow::Borrowed(x) => Cow::Borrowed(Path::new(x)), Cow::Owned(x) => Cow::Owned(PathBuf::from(x)), } } /// Create a new byte string by repeating this byte string `n` times. 
/// /// # Panics /// /// This function panics if the capacity of the new byte string would /// overflow. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// assert_eq!(b"foo".repeatn(4), B("foofoofoofoo")); /// assert_eq!(b"foo".repeatn(0), B("")); /// ``` #[cfg(feature = "std")] #[inline] fn repeatn(&self, n: usize) -> Vec { let bs = self.as_bytes(); let mut dst = vec![0; bs.len() * n]; for i in 0..n { dst[i * bs.len()..(i + 1) * bs.len()].copy_from_slice(bs); } dst } /// Returns true if and only if this byte string contains the given needle. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert!(b"foo bar".contains_str("foo")); /// assert!(b"foo bar".contains_str("bar")); /// assert!(!b"foo".contains_str("foobar")); /// ``` #[inline] fn contains_str>(&self, needle: B) -> bool { self.find(needle).is_some() } /// Returns true if and only if this byte string has the given prefix. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert!(b"foo bar".starts_with_str("foo")); /// assert!(!b"foo bar".starts_with_str("bar")); /// assert!(!b"foo".starts_with_str("foobar")); /// ``` #[inline] fn starts_with_str>(&self, prefix: B) -> bool { self.as_bytes().starts_with(prefix.as_ref()) } /// Returns true if and only if this byte string has the given suffix. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert!(b"foo bar".ends_with_str("bar")); /// assert!(!b"foo bar".ends_with_str("foo")); /// assert!(!b"bar".ends_with_str("foobar")); /// ``` #[inline] fn ends_with_str>(&self, suffix: B) -> bool { self.as_bytes().ends_with(suffix.as_ref()) } /// Returns the index of the first occurrence of the given needle. /// /// The needle may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// Note that if you're are searching for the same needle in many /// different small haystacks, it may be faster to initialize a /// [`Finder`](struct.Finder.html) once, and reuse it for each search. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the needle and the haystack. That is, this runs /// in `O(needle.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo bar baz"; /// assert_eq!(Some(0), s.find("foo")); /// assert_eq!(Some(4), s.find("bar")); /// assert_eq!(None, s.find("quux")); /// ``` #[inline] fn find>(&self, needle: B) -> Option { Finder::new(needle.as_ref()).find(self.as_bytes()) } /// Returns the index of the last occurrence of the given needle. /// /// The needle may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// Note that if you're are searching for the same needle in many /// different small haystacks, it may be faster to initialize a /// [`FinderReverse`](struct.FinderReverse.html) once, and reuse it for /// each search. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the needle and the haystack. That is, this runs /// in `O(needle.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. 
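///
/// As a sketch of the reuse pattern mentioned above, a
/// [`FinderReverse`](struct.FinderReverse.html) can be built once and then
/// applied to many haystacks:
///
/// ```
/// use bstr::FinderReverse;
///
/// let finder = FinderReverse::new("ba");
/// assert_eq!(Some(8), finder.rfind(b"foo bar baz"));
/// assert_eq!(Some(0), finder.rfind(b"bat"));
/// assert_eq!(None, finder.rfind(b"quux"));
/// ```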
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo bar baz"; /// assert_eq!(Some(0), s.rfind("foo")); /// assert_eq!(Some(4), s.rfind("bar")); /// assert_eq!(Some(8), s.rfind("ba")); /// assert_eq!(None, s.rfind("quux")); /// ``` #[inline] fn rfind>(&self, needle: B) -> Option { FinderReverse::new(needle.as_ref()).rfind(self.as_bytes()) } /// Returns an iterator of the non-overlapping occurrences of the given /// needle. The iterator yields byte offset positions indicating the start /// of each match. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the needle and the haystack. That is, this runs /// in `O(needle.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo bar foo foo quux foo"; /// let matches: Vec = s.find_iter("foo").collect(); /// assert_eq!(matches, vec![0, 8, 12, 21]); /// ``` /// /// An empty string matches at every position, including the position /// immediately following the last byte: /// /// ``` /// use bstr::ByteSlice; /// /// let matches: Vec = b"foo".find_iter("").collect(); /// assert_eq!(matches, vec![0, 1, 2, 3]); /// /// let matches: Vec = b"".find_iter("").collect(); /// assert_eq!(matches, vec![0]); /// ``` #[inline] fn find_iter<'a, B: ?Sized + AsRef<[u8]>>( &'a self, needle: &'a B, ) -> Find<'a> { Find::new(self.as_bytes(), needle.as_ref()) } /// Returns an iterator of the non-overlapping occurrences of the given /// needle in reverse. The iterator yields byte offset positions indicating /// the start of each match. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the needle and the haystack. That is, this runs /// in `O(needle.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo bar foo foo quux foo"; /// let matches: Vec = s.rfind_iter("foo").collect(); /// assert_eq!(matches, vec![21, 12, 8, 0]); /// ``` /// /// An empty string matches at every position, including the position /// immediately following the last byte: /// /// ``` /// use bstr::ByteSlice; /// /// let matches: Vec = b"foo".rfind_iter("").collect(); /// assert_eq!(matches, vec![3, 2, 1, 0]); /// /// let matches: Vec = b"".rfind_iter("").collect(); /// assert_eq!(matches, vec![0]); /// ``` #[inline] fn rfind_iter<'a, B: ?Sized + AsRef<[u8]>>( &'a self, needle: &'a B, ) -> FindReverse<'a> { FindReverse::new(self.as_bytes(), needle.as_ref()) } /// Returns the index of the first occurrence of the given byte. If the /// byte does not occur in this byte string, then `None` is returned. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(Some(10), b"foo bar baz".find_byte(b'z')); /// assert_eq!(None, b"foo bar baz".find_byte(b'y')); /// ``` #[inline] fn find_byte(&self, byte: u8) -> Option { memchr(byte, self.as_bytes()) } /// Returns the index of the last occurrence of the given byte. If the /// byte does not occur in this byte string, then `None` is returned. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(Some(10), b"foo bar baz".rfind_byte(b'z')); /// assert_eq!(None, b"foo bar baz".rfind_byte(b'y')); /// ``` #[inline] fn rfind_byte(&self, byte: u8) -> Option { memrchr(byte, self.as_bytes()) } /// Returns the index of the first occurrence of the given codepoint. /// If the codepoint does not occur in this byte string, then `None` is /// returned. /// /// Note that if one searches for the replacement codepoint, `\u{FFFD}`, /// then only explicit occurrences of that encoding will be found. Invalid /// UTF-8 sequences will not be matched. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// assert_eq!(Some(10), b"foo bar baz".find_char('z')); /// assert_eq!(Some(4), B("αβγγδ").find_char('γ')); /// assert_eq!(None, b"foo bar baz".find_char('y')); /// ``` #[inline] fn find_char(&self, ch: char) -> Option { self.find(ch.encode_utf8(&mut [0; 4])) } /// Returns the index of the last occurrence of the given codepoint. /// If the codepoint does not occur in this byte string, then `None` is /// returned. /// /// Note that if one searches for the replacement codepoint, `\u{FFFD}`, /// then only explicit occurrences of that encoding will be found. Invalid /// UTF-8 sequences will not be matched. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// assert_eq!(Some(10), b"foo bar baz".rfind_char('z')); /// assert_eq!(Some(6), B("αβγγδ").rfind_char('γ')); /// assert_eq!(None, b"foo bar baz".rfind_char('y')); /// ``` #[inline] fn rfind_char(&self, ch: char) -> Option { self.rfind(ch.encode_utf8(&mut [0; 4])) } /// Returns the index of the first occurrence of any of the bytes in the /// provided set. /// /// The `byteset` may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but /// note that passing a `&str` which contains multibyte characters may not /// behave as you expect: each byte in the `&str` is treated as an /// individual member of the byte set. /// /// Note that order is irrelevant for the `byteset` parameter, and /// duplicate bytes present in its body are ignored. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the set of bytes and the haystack. That is, this /// runs in `O(byteset.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(b"foo bar baz".find_byteset(b"zr"), Some(6)); /// assert_eq!(b"foo baz bar".find_byteset(b"bzr"), Some(4)); /// assert_eq!(None, b"foo baz bar".find_byteset(b"\t\n")); /// ``` #[inline] fn find_byteset>(&self, byteset: B) -> Option { byteset::find(self.as_bytes(), byteset.as_ref()) } /// Returns the index of the first occurrence of a byte that is not a member /// of the provided set. /// /// The `byteset` may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but /// note that passing a `&str` which contains multibyte characters may not /// behave as you expect: each byte in the `&str` is treated as an /// individual member of the byte set. /// /// Note that order is irrelevant for the `byteset` parameter, and /// duplicate bytes present in its body are ignored. 
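///
/// One common use of this routine, sketched here, is finding where a leading
/// run of "padding" bytes ends:
///
/// ```
/// use bstr::{B, ByteSlice};
///
/// let s = b"\t\t  foo bar";
/// // Index of the first byte that is neither a space nor a tab.
/// let start = s.find_not_byteset(b" \t").unwrap_or(s.len());
/// assert_eq!(B("foo bar"), &s[start..]);
/// ```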
/// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the set of bytes and the haystack. That is, this /// runs in `O(byteset.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(b"foo bar baz".find_not_byteset(b"fo "), Some(4)); /// assert_eq!(b"\t\tbaz bar".find_not_byteset(b" \t\r\n"), Some(2)); /// assert_eq!(b"foo\nbaz\tbar".find_not_byteset(b"\t\n"), Some(0)); /// ``` #[inline] fn find_not_byteset>(&self, byteset: B) -> Option { byteset::find_not(self.as_bytes(), byteset.as_ref()) } /// Returns the index of the last occurrence of any of the bytes in the /// provided set. /// /// The `byteset` may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but /// note that passing a `&str` which contains multibyte characters may not /// behave as you expect: each byte in the `&str` is treated as an /// individual member of the byte set. /// /// Note that order is irrelevant for the `byteset` parameter, and duplicate /// bytes present in its body are ignored. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the set of bytes and the haystack. That is, this /// runs in `O(byteset.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(b"foo bar baz".rfind_byteset(b"agb"), Some(9)); /// assert_eq!(b"foo baz bar".rfind_byteset(b"rabz "), Some(10)); /// assert_eq!(b"foo baz bar".rfind_byteset(b"\n123"), None); /// ``` #[inline] fn rfind_byteset>(&self, byteset: B) -> Option { byteset::rfind(self.as_bytes(), byteset.as_ref()) } /// Returns the index of the last occurrence of a byte that is not a member /// of the provided set. /// /// The `byteset` may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`, but /// note that passing a `&str` which contains multibyte characters may not /// behave as you expect: each byte in the `&str` is treated as an /// individual member of the byte set. /// /// Note that order is irrelevant for the `byteset` parameter, and /// duplicate bytes present in its body are ignored. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the set of bytes and the haystack. That is, this /// runs in `O(byteset.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(b"foo bar baz,\t".rfind_not_byteset(b",\t"), Some(10)); /// assert_eq!(b"foo baz bar".rfind_not_byteset(b"rabz "), Some(2)); /// assert_eq!(None, b"foo baz bar".rfind_not_byteset(b"barfoz ")); /// ``` #[inline] fn rfind_not_byteset>(&self, byteset: B) -> Option { byteset::rfind_not(self.as_bytes(), byteset.as_ref()) } /// Returns an iterator over the fields in a byte string, separated by /// contiguous whitespace. 
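///
/// Unlike splitting on a single space with
/// [`split_str`](#method.split_str), runs of whitespace do not produce empty
/// elements; a quick sketch:
///
/// ```
/// use bstr::{B, ByteSlice};
///
/// let s = B("  foo   bar ");
/// let fields: Vec<&[u8]> = s.fields().collect();
/// assert_eq!(fields, vec![B("foo"), B("bar")]);
/// ```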
/// /// # Example /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(" foo\tbar\t\u{2003}\nquux \n"); /// let fields: Vec<&[u8]> = s.fields().collect(); /// assert_eq!(fields, vec![B("foo"), B("bar"), B("quux")]); /// ``` /// /// A byte string consisting of just whitespace yields no elements: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// assert_eq!(0, B(" \n\t\u{2003}\n \t").fields().count()); /// ``` #[inline] fn fields(&self) -> Fields<'_> { Fields::new(self.as_bytes()) } /// Returns an iterator over the fields in a byte string, separated by /// contiguous codepoints satisfying the given predicate. /// /// If this byte string is not valid UTF-8, then the given closure will /// be called with a Unicode replacement codepoint when invalid UTF-8 /// bytes are seen. /// /// # Example /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = b"123foo999999bar1quux123456"; /// let fields: Vec<&[u8]> = s.fields_with(|c| c.is_numeric()).collect(); /// assert_eq!(fields, vec![B("foo"), B("bar"), B("quux")]); /// ``` /// /// A byte string consisting of all codepoints satisfying the predicate /// yields no elements: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(0, b"1911354563".fields_with(|c| c.is_numeric()).count()); /// ``` #[inline] fn fields_with bool>(&self, f: F) -> FieldsWith<'_, F> { FieldsWith::new(self.as_bytes(), f) } /// Returns an iterator over substrings of this byte string, separated /// by the given byte string. Each element yielded is guaranteed not to /// include the splitter substring. /// /// The splitter may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"Mary had a little lamb".split_str(" ").collect(); /// assert_eq!(x, vec![ /// B("Mary"), B("had"), B("a"), B("little"), B("lamb"), /// ]); /// /// let x: Vec<&[u8]> = b"".split_str("X").collect(); /// assert_eq!(x, vec![b""]); /// /// let x: Vec<&[u8]> = b"lionXXtigerXleopard".split_str("X").collect(); /// assert_eq!(x, vec![B("lion"), B(""), B("tiger"), B("leopard")]); /// /// let x: Vec<&[u8]> = b"lion::tiger::leopard".split_str("::").collect(); /// assert_eq!(x, vec![B("lion"), B("tiger"), B("leopard")]); /// ``` /// /// If a string contains multiple contiguous separators, you will end up /// with empty strings yielded by the iterator: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"||||a||b|c".split_str("|").collect(); /// assert_eq!(x, vec![ /// B(""), B(""), B(""), B(""), B("a"), B(""), B("b"), B("c"), /// ]); /// /// let x: Vec<&[u8]> = b"(///)".split_str("/").collect(); /// assert_eq!(x, vec![B("("), B(""), B(""), B(")")]); /// ``` /// /// Separators at the start or end of a string are neighbored by empty /// strings. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"010".split_str("0").collect(); /// assert_eq!(x, vec![B(""), B("1"), B("")]); /// ``` /// /// When the empty string is used as a separator, it splits every **byte** /// in the byte string, along with the beginning and end of the byte /// string. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"rust".split_str("").collect(); /// assert_eq!(x, vec![ /// B(""), B("r"), B("u"), B("s"), B("t"), B(""), /// ]); /// /// // Splitting by an empty string is not UTF-8 aware. Elements yielded /// // may not be valid UTF-8! 
/// let x: Vec<&[u8]> = B("☃").split_str("").collect(); /// assert_eq!(x, vec![ /// B(""), B(b"\xE2"), B(b"\x98"), B(b"\x83"), B(""), /// ]); /// ``` /// /// Contiguous separators, especially whitespace, can lead to possibly /// surprising behavior. For example, this code is correct: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b" a b c".split_str(" ").collect(); /// assert_eq!(x, vec![ /// B(""), B(""), B(""), B(""), B("a"), B(""), B("b"), B("c"), /// ]); /// ``` /// /// It does *not* give you `["a", "b", "c"]`. For that behavior, use /// [`fields`](#method.fields) instead. #[inline] fn split_str<'a, B: ?Sized + AsRef<[u8]>>( &'a self, splitter: &'a B, ) -> Split<'a> { Split::new(self.as_bytes(), splitter.as_ref()) } /// Returns an iterator over substrings of this byte string, separated by /// the given byte string, in reverse. Each element yielded is guaranteed /// not to include the splitter substring. /// /// The splitter may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = /// b"Mary had a little lamb".rsplit_str(" ").collect(); /// assert_eq!(x, vec![ /// B("lamb"), B("little"), B("a"), B("had"), B("Mary"), /// ]); /// /// let x: Vec<&[u8]> = b"".rsplit_str("X").collect(); /// assert_eq!(x, vec![b""]); /// /// let x: Vec<&[u8]> = b"lionXXtigerXleopard".rsplit_str("X").collect(); /// assert_eq!(x, vec![B("leopard"), B("tiger"), B(""), B("lion")]); /// /// let x: Vec<&[u8]> = b"lion::tiger::leopard".rsplit_str("::").collect(); /// assert_eq!(x, vec![B("leopard"), B("tiger"), B("lion")]); /// ``` /// /// If a string contains multiple contiguous separators, you will end up /// with empty strings yielded by the iterator: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"||||a||b|c".rsplit_str("|").collect(); /// assert_eq!(x, vec![ /// B("c"), B("b"), B(""), B("a"), B(""), B(""), B(""), B(""), /// ]); /// /// let x: Vec<&[u8]> = b"(///)".rsplit_str("/").collect(); /// assert_eq!(x, vec![B(")"), B(""), B(""), B("(")]); /// ``` /// /// Separators at the start or end of a string are neighbored by empty /// strings. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"010".rsplit_str("0").collect(); /// assert_eq!(x, vec![B(""), B("1"), B("")]); /// ``` /// /// When the empty string is used as a separator, it splits every **byte** /// in the byte string, along with the beginning and end of the byte /// string. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b"rust".rsplit_str("").collect(); /// assert_eq!(x, vec![ /// B(""), B("t"), B("s"), B("u"), B("r"), B(""), /// ]); /// /// // Splitting by an empty string is not UTF-8 aware. Elements yielded /// // may not be valid UTF-8! /// let x: Vec<&[u8]> = B("☃").rsplit_str("").collect(); /// assert_eq!(x, vec![B(""), B(b"\x83"), B(b"\x98"), B(b"\xE2"), B("")]); /// ``` /// /// Contiguous separators, especially whitespace, can lead to possibly /// surprising behavior. For example, this code is correct: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<&[u8]> = b" a b c".rsplit_str(" ").collect(); /// assert_eq!(x, vec![ /// B("c"), B("b"), B(""), B("a"), B(""), B(""), B(""), B(""), /// ]); /// ``` /// /// It does *not* give you `["a", "b", "c"]`. 
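///
/// A practical sketch: the first element yielded by this reverse iterator is
/// the last `/`-separated component of a path-like byte string (the path here
/// is made up for illustration):
///
/// ```
/// use bstr::{B, ByteSlice};
///
/// let path = b"/usr/local/bin/ripgrep";
/// let name = path.rsplit_str("/").next().unwrap();
/// assert_eq!(name, B("ripgrep"));
/// ```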
#[inline] fn rsplit_str<'a, B: ?Sized + AsRef<[u8]>>( &'a self, splitter: &'a B, ) -> SplitReverse<'a> { SplitReverse::new(self.as_bytes(), splitter.as_ref()) } /// Returns an iterator of at most `limit` substrings of this byte string, /// separated by the given byte string. If `limit` substrings are yielded, /// then the last substring will contain the remainder of this byte string. /// /// The needle may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<_> = b"Mary had a little lamb".splitn_str(3, " ").collect(); /// assert_eq!(x, vec![B("Mary"), B("had"), B("a little lamb")]); /// /// let x: Vec<_> = b"".splitn_str(3, "X").collect(); /// assert_eq!(x, vec![b""]); /// /// let x: Vec<_> = b"lionXXtigerXleopard".splitn_str(3, "X").collect(); /// assert_eq!(x, vec![B("lion"), B(""), B("tigerXleopard")]); /// /// let x: Vec<_> = b"lion::tiger::leopard".splitn_str(2, "::").collect(); /// assert_eq!(x, vec![B("lion"), B("tiger::leopard")]); /// /// let x: Vec<_> = b"abcXdef".splitn_str(1, "X").collect(); /// assert_eq!(x, vec![B("abcXdef")]); /// /// let x: Vec<_> = b"abcdef".splitn_str(2, "X").collect(); /// assert_eq!(x, vec![B("abcdef")]); /// /// let x: Vec<_> = b"abcXdef".splitn_str(0, "X").collect(); /// assert!(x.is_empty()); /// ``` #[inline] fn splitn_str<'a, B: ?Sized + AsRef<[u8]>>( &'a self, limit: usize, splitter: &'a B, ) -> SplitN<'a> { SplitN::new(self.as_bytes(), splitter.as_ref(), limit) } /// Returns an iterator of at most `limit` substrings of this byte string, /// separated by the given byte string, in reverse. If `limit` substrings /// are yielded, then the last substring will contain the remainder of this /// byte string. /// /// The needle may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let x: Vec<_> = /// b"Mary had a little lamb".rsplitn_str(3, " ").collect(); /// assert_eq!(x, vec![B("lamb"), B("little"), B("Mary had a")]); /// /// let x: Vec<_> = b"".rsplitn_str(3, "X").collect(); /// assert_eq!(x, vec![b""]); /// /// let x: Vec<_> = b"lionXXtigerXleopard".rsplitn_str(3, "X").collect(); /// assert_eq!(x, vec![B("leopard"), B("tiger"), B("lionX")]); /// /// let x: Vec<_> = b"lion::tiger::leopard".rsplitn_str(2, "::").collect(); /// assert_eq!(x, vec![B("leopard"), B("lion::tiger")]); /// /// let x: Vec<_> = b"abcXdef".rsplitn_str(1, "X").collect(); /// assert_eq!(x, vec![B("abcXdef")]); /// /// let x: Vec<_> = b"abcdef".rsplitn_str(2, "X").collect(); /// assert_eq!(x, vec![B("abcdef")]); /// /// let x: Vec<_> = b"abcXdef".rsplitn_str(0, "X").collect(); /// assert!(x.is_empty()); /// ``` #[inline] fn rsplitn_str<'a, B: ?Sized + AsRef<[u8]>>( &'a self, limit: usize, splitter: &'a B, ) -> SplitNReverse<'a> { SplitNReverse::new(self.as_bytes(), splitter.as_ref(), limit) } /// Replace all matches of the given needle with the given replacement, and /// the result as a new `Vec`. /// /// This routine is useful as a convenience. If you need to reuse an /// allocation, use [`replace_into`](#method.replace_into) instead. 
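///
/// As a sketch of that reuse pattern, a single destination buffer can be
/// cleared and refilled across many inputs with
/// [`replace_into`](#method.replace_into) (the inputs here are made up for
/// illustration):
///
/// ```
/// use bstr::ByteSlice;
///
/// let inputs: &[&[u8]] = &[b"this is old", b"nothing to do"];
/// let mut dest = vec![];
/// for input in inputs {
///     // `replace_into` does not clear `dest`, so clear it between inputs.
///     dest.clear();
///     input.replace_into("old", "new", &mut dest);
/// }
/// assert_eq!(dest, "nothing to do".as_bytes());
/// ```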
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"this is old".replace("old", "new"); /// assert_eq!(s, "this is new".as_bytes()); /// ``` /// /// When the pattern doesn't match: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"this is old".replace("nada nada", "limonada"); /// assert_eq!(s, "this is old".as_bytes()); /// ``` /// /// When the needle is an empty string: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo".replace("", "Z"); /// assert_eq!(s, "ZfZoZoZ".as_bytes()); /// ``` #[cfg(feature = "std")] #[inline] fn replace, R: AsRef<[u8]>>( &self, needle: N, replacement: R, ) -> Vec { let mut dest = Vec::with_capacity(self.as_bytes().len()); self.replace_into(needle, replacement, &mut dest); dest } /// Replace up to `limit` matches of the given needle with the given /// replacement, and the result as a new `Vec`. /// /// This routine is useful as a convenience. If you need to reuse an /// allocation, use [`replacen_into`](#method.replacen_into) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foofoo".replacen("o", "z", 2); /// assert_eq!(s, "fzzfoo".as_bytes()); /// ``` /// /// When the pattern doesn't match: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foofoo".replacen("a", "z", 2); /// assert_eq!(s, "foofoo".as_bytes()); /// ``` /// /// When the needle is an empty string: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo".replacen("", "Z", 2); /// assert_eq!(s, "ZfZoo".as_bytes()); /// ``` #[cfg(feature = "std")] #[inline] fn replacen, R: AsRef<[u8]>>( &self, needle: N, replacement: R, limit: usize, ) -> Vec { let mut dest = Vec::with_capacity(self.as_bytes().len()); self.replacen_into(needle, replacement, limit, &mut dest); dest } /// Replace all matches of the given needle with the given replacement, /// and write the result into the provided `Vec`. /// /// This does **not** clear `dest` before writing to it. /// /// This routine is useful for reusing allocation. For a more convenient /// API, use [`replace`](#method.replace) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"this is old"; /// /// let mut dest = vec![]; /// s.replace_into("old", "new", &mut dest); /// assert_eq!(dest, "this is new".as_bytes()); /// ``` /// /// When the pattern doesn't match: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"this is old"; /// /// let mut dest = vec![]; /// s.replace_into("nada nada", "limonada", &mut dest); /// assert_eq!(dest, "this is old".as_bytes()); /// ``` /// /// When the needle is an empty string: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo"; /// /// let mut dest = vec![]; /// s.replace_into("", "Z", &mut dest); /// assert_eq!(dest, "ZfZoZoZ".as_bytes()); /// ``` #[cfg(feature = "std")] #[inline] fn replace_into, R: AsRef<[u8]>>( &self, needle: N, replacement: R, dest: &mut Vec, ) { let (needle, replacement) = (needle.as_ref(), replacement.as_ref()); let mut last = 0; for start in self.find_iter(needle) { dest.push_str(&self.as_bytes()[last..start]); dest.push_str(replacement); last = start + needle.len(); } dest.push_str(&self.as_bytes()[last..]); } /// Replace up to `limit` matches of the given needle with the given /// replacement, and write the result into the provided `Vec`. /// /// This does **not** clear `dest` before writing to it. /// /// This routine is useful for reusing allocation. For a more convenient /// API, use [`replacen`](#method.replacen) instead. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foofoo"; /// /// let mut dest = vec![]; /// s.replacen_into("o", "z", 2, &mut dest); /// assert_eq!(dest, "fzzfoo".as_bytes()); /// ``` /// /// When the pattern doesn't match: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foofoo"; /// /// let mut dest = vec![]; /// s.replacen_into("a", "z", 2, &mut dest); /// assert_eq!(dest, "foofoo".as_bytes()); /// ``` /// /// When the needle is an empty string: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foo"; /// /// let mut dest = vec![]; /// s.replacen_into("", "Z", 2, &mut dest); /// assert_eq!(dest, "ZfZoo".as_bytes()); /// ``` #[cfg(feature = "std")] #[inline] fn replacen_into, R: AsRef<[u8]>>( &self, needle: N, replacement: R, limit: usize, dest: &mut Vec, ) { let (needle, replacement) = (needle.as_ref(), replacement.as_ref()); let mut last = 0; for start in self.find_iter(needle).take(limit) { dest.push_str(&self.as_bytes()[last..start]); dest.push_str(replacement); last = start + needle.len(); } dest.push_str(&self.as_bytes()[last..]); } /// Returns an iterator over the bytes in this byte string. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"foobar"; /// let bytes: Vec = bs.bytes().collect(); /// assert_eq!(bytes, bs); /// ``` #[inline] fn bytes(&self) -> Bytes<'_> { Bytes { it: self.as_bytes().iter() } } /// Returns an iterator over the Unicode scalar values in this byte string. /// If invalid UTF-8 is encountered, then the Unicode replacement codepoint /// is yielded instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; /// let chars: Vec = bs.chars().collect(); /// assert_eq!(vec!['☃', '\u{FFFD}', '𝞃', '\u{FFFD}', 'a'], chars); /// ``` /// /// Codepoints can also be iterated over in reverse: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; /// let chars: Vec = bs.chars().rev().collect(); /// assert_eq!(vec!['a', '\u{FFFD}', '𝞃', '\u{FFFD}', '☃'], chars); /// ``` #[inline] fn chars(&self) -> Chars<'_> { Chars::new(self.as_bytes()) } /// Returns an iterator over the Unicode scalar values in this byte string /// along with their starting and ending byte index positions. If invalid /// UTF-8 is encountered, then the Unicode replacement codepoint is yielded /// instead. /// /// Note that this is slightly different from the `CharIndices` iterator /// provided by the standard library. Aside from working on possibly /// invalid UTF-8, this iterator provides both the corresponding starting /// and ending byte indices of each codepoint yielded. The ending position /// is necessary to slice the original byte string when invalid UTF-8 bytes /// are converted into a Unicode replacement codepoint, since a single /// replacement codepoint can substitute anywhere from 1 to 3 invalid bytes /// (inclusive). 
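///
/// As a sketch of why the ending position matters, the offsets can be used
/// to slice out the raw bytes that a replacement codepoint stands in for:
///
/// ```
/// use bstr::{B, ByteSlice};
///
/// let bs = b"a\xF1\x80\x80z";
/// for (start, end, ch) in bs.char_indices() {
///     if ch == '\u{FFFD}' {
///         // One replacement codepoint stands in for all three invalid bytes.
///         assert_eq!(B(b"\xF1\x80\x80"), &bs[start..end]);
///     }
/// }
/// ```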
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; /// let chars: Vec<(usize, usize, char)> = bs.char_indices().collect(); /// assert_eq!(chars, vec![ /// (0, 3, '☃'), /// (3, 4, '\u{FFFD}'), /// (4, 8, '𝞃'), /// (8, 10, '\u{FFFD}'), /// (10, 11, 'a'), /// ]); /// ``` /// /// Codepoints can also be iterated over in reverse: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"; /// let chars: Vec<(usize, usize, char)> = bs /// .char_indices() /// .rev() /// .collect(); /// assert_eq!(chars, vec![ /// (10, 11, 'a'), /// (8, 10, '\u{FFFD}'), /// (4, 8, '𝞃'), /// (3, 4, '\u{FFFD}'), /// (0, 3, '☃'), /// ]); /// ``` #[inline] fn char_indices(&self) -> CharIndices<'_> { CharIndices::new(self.as_bytes()) } /// Iterate over chunks of valid UTF-8. /// /// The iterator returned yields chunks of valid UTF-8 separated by invalid /// UTF-8 bytes, if they exist. Invalid UTF-8 bytes are always 1-3 bytes, /// which are determined via the "substitution of maximal subparts" /// strategy described in the docs for the /// [`ByteSlice::to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) /// method. /// /// # Examples /// /// This example shows how to gather all valid and invalid chunks from a /// byte slice: /// /// ``` /// use bstr::{ByteSlice, Utf8Chunk}; /// /// let bytes = b"foo\xFD\xFEbar\xFF"; /// /// let (mut valid_chunks, mut invalid_chunks) = (vec![], vec![]); /// for chunk in bytes.utf8_chunks() { /// if !chunk.valid().is_empty() { /// valid_chunks.push(chunk.valid()); /// } /// if !chunk.invalid().is_empty() { /// invalid_chunks.push(chunk.invalid()); /// } /// } /// /// assert_eq!(valid_chunks, vec!["foo", "bar"]); /// assert_eq!(invalid_chunks, vec![b"\xFD", b"\xFE", b"\xFF"]); /// ``` #[inline] fn utf8_chunks(&self) -> Utf8Chunks<'_> { Utf8Chunks { bytes: self.as_bytes() } } /// Returns an iterator over the grapheme clusters in this byte string. /// If invalid UTF-8 is encountered, then the Unicode replacement codepoint /// is yielded instead. /// /// # Examples /// /// This example shows how multiple codepoints can combine to form a /// single grapheme cluster: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = "a\u{0300}\u{0316}\u{1F1FA}\u{1F1F8}".as_bytes(); /// let graphemes: Vec<&str> = bs.graphemes().collect(); /// assert_eq!(vec!["à̖", "🇺🇸"], graphemes); /// ``` /// /// This shows that graphemes can be iterated over in reverse: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = "a\u{0300}\u{0316}\u{1F1FA}\u{1F1F8}".as_bytes(); /// let graphemes: Vec<&str> = bs.graphemes().rev().collect(); /// assert_eq!(vec!["🇺🇸", "à̖"], graphemes); /// ``` #[cfg(feature = "unicode")] #[inline] fn graphemes(&self) -> Graphemes<'_> { Graphemes::new(self.as_bytes()) } /// Returns an iterator over the grapheme clusters in this byte string /// along with their starting and ending byte index positions. If invalid /// UTF-8 is encountered, then the Unicode replacement codepoint is yielded /// instead. /// /// # Examples /// /// This example shows how to get the byte offsets of each individual /// grapheme cluster: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = "a\u{0300}\u{0316}\u{1F1FA}\u{1F1F8}".as_bytes(); /// let graphemes: Vec<(usize, usize, &str)> = /// bs.grapheme_indices().collect(); /// assert_eq!(vec![(0, 5, "à̖"), (5, 13, "🇺🇸")], graphemes); /// ``` /// /// This example shows what happens when invalid UTF-8 is enountered. 
Note /// that the offsets are valid indices into the original string, and do /// not necessarily correspond to the length of the `&str` returned! /// /// ``` /// use bstr::{ByteSlice, ByteVec}; /// /// let mut bytes = vec![]; /// bytes.push_str("a\u{0300}\u{0316}"); /// bytes.push(b'\xFF'); /// bytes.push_str("\u{1F1FA}\u{1F1F8}"); /// /// let graphemes: Vec<(usize, usize, &str)> = /// bytes.grapheme_indices().collect(); /// assert_eq!( /// graphemes, /// vec![(0, 5, "à̖"), (5, 6, "\u{FFFD}"), (6, 14, "🇺🇸")] /// ); /// ``` #[cfg(feature = "unicode")] #[inline] fn grapheme_indices(&self) -> GraphemeIndices<'_> { GraphemeIndices::new(self.as_bytes()) } /// Returns an iterator over the words in this byte string. If invalid /// UTF-8 is encountered, then the Unicode replacement codepoint is yielded /// instead. /// /// This is similar to /// [`words_with_breaks`](trait.ByteSlice.html#method.words_with_breaks), /// except it only returns elements that contain a "word" character. A word /// character is defined by UTS #18 (Annex C) to be the combination of the /// `Alphabetic` and `Join_Control` properties, along with the /// `Decimal_Number`, `Mark` and `Connector_Punctuation` general /// categories. /// /// Since words are made up of one or more codepoints, this iterator /// yields `&str` elements. When invalid UTF-8 is encountered, replacement /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = br#"The quick ("brown") fox can't jump 32.3 feet, right?"#; /// let words: Vec<&str> = bs.words().collect(); /// assert_eq!(words, vec![ /// "The", "quick", "brown", "fox", "can't", /// "jump", "32.3", "feet", "right", /// ]); /// ``` #[cfg(feature = "unicode")] #[inline] fn words(&self) -> Words<'_> { Words::new(self.as_bytes()) } /// Returns an iterator over the words in this byte string along with /// their starting and ending byte index positions. /// /// This is similar to /// [`words_with_break_indices`](trait.ByteSlice.html#method.words_with_break_indices), /// except it only returns elements that contain a "word" character. A word /// character is defined by UTS #18 (Annex C) to be the combination of the /// `Alphabetic` and `Join_Control` properties, along with the /// `Decimal_Number`, `Mark` and `Connector_Punctuation` general /// categories. /// /// Since words are made up of one or more codepoints, this iterator /// yields `&str` elements. When invalid UTF-8 is encountered, replacement /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). /// /// # Examples /// /// This example shows how to get the byte offsets of each individual /// word: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"can't jump 32.3 feet"; /// let words: Vec<(usize, usize, &str)> = bs.word_indices().collect(); /// assert_eq!(words, vec![ /// (0, 5, "can't"), /// (6, 10, "jump"), /// (11, 15, "32.3"), /// (16, 20, "feet"), /// ]); /// ``` #[cfg(feature = "unicode")] #[inline] fn word_indices(&self) -> WordIndices<'_> { WordIndices::new(self.as_bytes()) } /// Returns an iterator over the words in this byte string, along with /// all breaks between the words. Concatenating all elements yielded by /// the iterator results in the original string (modulo Unicode replacement /// codepoint substitutions if invalid UTF-8 is encountered). /// /// Since words are made up of one or more codepoints, this iterator /// yields `&str` elements. 
When invalid UTF-8 is encountered, replacement /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = br#"The quick ("brown") fox can't jump 32.3 feet, right?"#; /// let words: Vec<&str> = bs.words_with_breaks().collect(); /// assert_eq!(words, vec![ /// "The", " ", "quick", " ", "(", "\"", "brown", "\"", ")", /// " ", "fox", " ", "can't", " ", "jump", " ", "32.3", " ", "feet", /// ",", " ", "right", "?", /// ]); /// ``` #[cfg(feature = "unicode")] #[inline] fn words_with_breaks(&self) -> WordsWithBreaks<'_> { WordsWithBreaks::new(self.as_bytes()) } /// Returns an iterator over the words and their byte offsets in this /// byte string, along with all breaks between the words. Concatenating /// all elements yielded by the iterator results in the original string /// (modulo Unicode replacement codepoint substitutions if invalid UTF-8 is /// encountered). /// /// Since words are made up of one or more codepoints, this iterator /// yields `&str` elements. When invalid UTF-8 is encountered, replacement /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). /// /// # Examples /// /// This example shows how to get the byte offsets of each individual /// word: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"can't jump 32.3 feet"; /// let words: Vec<(usize, usize, &str)> = /// bs.words_with_break_indices().collect(); /// assert_eq!(words, vec![ /// (0, 5, "can't"), /// (5, 6, " "), /// (6, 10, "jump"), /// (10, 11, " "), /// (11, 15, "32.3"), /// (15, 16, " "), /// (16, 20, "feet"), /// ]); /// ``` #[cfg(feature = "unicode")] #[inline] fn words_with_break_indices(&self) -> WordsWithBreakIndices<'_> { WordsWithBreakIndices::new(self.as_bytes()) } /// Returns an iterator over the sentences in this byte string. /// /// Typically, a sentence will include its trailing punctuation and /// whitespace. Concatenating all elements yielded by the iterator /// results in the original string (modulo Unicode replacement codepoint /// substitutions if invalid UTF-8 is encountered). /// /// Since sentences are made up of one or more codepoints, this iterator /// yields `&str` elements. When invalid UTF-8 is encountered, replacement /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"I want this. Not that. Right now."; /// let sentences: Vec<&str> = bs.sentences().collect(); /// assert_eq!(sentences, vec![ /// "I want this. ", /// "Not that. ", /// "Right now.", /// ]); /// ``` #[cfg(feature = "unicode")] #[inline] fn sentences(&self) -> Sentences<'_> { Sentences::new(self.as_bytes()) } /// Returns an iterator over the sentences in this byte string along with /// their starting and ending byte index positions. /// /// Typically, a sentence will include its trailing punctuation and /// whitespace. Concatenating all elements yielded by the iterator /// results in the original string (modulo Unicode replacement codepoint /// substitutions if invalid UTF-8 is encountered). /// /// Since sentences are made up of one or more codepoints, this iterator /// yields `&str` elements. When invalid UTF-8 is encountered, replacement /// codepoints are [substituted](index.html#handling-of-invalid-utf-8). /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let bs = b"I want this. Not that. 
Right now."; /// let sentences: Vec<(usize, usize, &str)> = /// bs.sentence_indices().collect(); /// assert_eq!(sentences, vec![ /// (0, 13, "I want this. "), /// (13, 23, "Not that. "), /// (23, 33, "Right now."), /// ]); /// ``` #[cfg(feature = "unicode")] #[inline] fn sentence_indices(&self) -> SentenceIndices<'_> { SentenceIndices::new(self.as_bytes()) } /// An iterator over all lines in a byte string, without their /// terminators. /// /// For this iterator, the only line terminators recognized are `\r\n` and /// `\n`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = b"\ /// foo /// /// bar\r /// baz /// /// /// quux"; /// let lines: Vec<&[u8]> = s.lines().collect(); /// assert_eq!(lines, vec![ /// B("foo"), B(""), B("bar"), B("baz"), B(""), B(""), B("quux"), /// ]); /// ``` #[inline] fn lines(&self) -> Lines<'_> { Lines::new(self.as_bytes()) } /// An iterator over all lines in a byte string, including their /// terminators. /// /// For this iterator, the only line terminator recognized is `\n`. (Since /// line terminators are included, this also handles `\r\n` line endings.) /// /// Line terminators are only included if they are present in the original /// byte string. For example, the last line in a byte string may not end /// with a line terminator. /// /// Concatenating all elements yielded by this iterator is guaranteed to /// yield the original byte string. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = b"\ /// foo /// /// bar\r /// baz /// /// /// quux"; /// let lines: Vec<&[u8]> = s.lines_with_terminator().collect(); /// assert_eq!(lines, vec![ /// B("foo\n"), /// B("\n"), /// B("bar\r\n"), /// B("baz\n"), /// B("\n"), /// B("\n"), /// B("quux"), /// ]); /// ``` #[inline] fn lines_with_terminator(&self) -> LinesWithTerminator<'_> { LinesWithTerminator::new(self.as_bytes()) } /// Return a byte string slice with leading and trailing whitespace /// removed. /// /// Whitespace is defined according to the terms of the `White_Space` /// Unicode property. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(" foo\tbar\t\u{2003}\n"); /// assert_eq!(s.trim(), B("foo\tbar")); /// ``` #[cfg(feature = "unicode")] #[inline] fn trim(&self) -> &[u8] { self.trim_start().trim_end() } /// Return a byte string slice with leading whitespace removed. /// /// Whitespace is defined according to the terms of the `White_Space` /// Unicode property. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(" foo\tbar\t\u{2003}\n"); /// assert_eq!(s.trim_start(), B("foo\tbar\t\u{2003}\n")); /// ``` #[cfg(feature = "unicode")] #[inline] fn trim_start(&self) -> &[u8] { let start = whitespace_len_fwd(self.as_bytes()); &self.as_bytes()[start..] } /// Return a byte string slice with trailing whitespace removed. /// /// Whitespace is defined according to the terms of the `White_Space` /// Unicode property. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(" foo\tbar\t\u{2003}\n"); /// assert_eq!(s.trim_end(), B(" foo\tbar")); /// ``` #[cfg(feature = "unicode")] #[inline] fn trim_end(&self) -> &[u8] { let end = whitespace_len_rev(self.as_bytes()); &self.as_bytes()[..end] } /// Return a byte string slice with leading and trailing characters /// satisfying the given predicate removed. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = b"123foo5bar789"; /// assert_eq!(s.trim_with(|c| c.is_numeric()), B("foo5bar")); /// ``` #[inline] fn trim_with bool>(&self, mut trim: F) -> &[u8] { self.trim_start_with(&mut trim).trim_end_with(&mut trim) } /// Return a byte string slice with leading characters satisfying the given /// predicate removed. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = b"123foo5bar789"; /// assert_eq!(s.trim_start_with(|c| c.is_numeric()), B("foo5bar789")); /// ``` #[inline] fn trim_start_with bool>(&self, mut trim: F) -> &[u8] { for (s, _, ch) in self.char_indices() { if !trim(ch) { return &self.as_bytes()[s..]; } } b"" } /// Return a byte string slice with trailing characters satisfying the /// given predicate removed. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = b"123foo5bar789"; /// assert_eq!(s.trim_end_with(|c| c.is_numeric()), B("123foo5bar")); /// ``` #[inline] fn trim_end_with bool>(&self, mut trim: F) -> &[u8] { for (_, e, ch) in self.char_indices().rev() { if !trim(ch) { return &self.as_bytes()[..e]; } } b"" } /// Returns a new `Vec` containing the lowercase equivalent of this /// byte string. /// /// In this case, lowercase is defined according to the `Lowercase` Unicode /// property. /// /// If invalid UTF-8 is seen, or if a character has no lowercase variant, /// then it is written to the given buffer unchanged. /// /// Note that some characters in this byte string may expand into multiple /// characters when changing the case, so the number of bytes written to /// the given byte string may not be equivalent to the number of bytes in /// this byte string. /// /// If you'd like to reuse an allocation for performance reasons, then use /// [`to_lowercase_into`](#method.to_lowercase_into) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("HELLO Β"); /// assert_eq!("hello β".as_bytes(), s.to_lowercase().as_bytes()); /// ``` /// /// Scripts without case are not changed: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("农历新年"); /// assert_eq!("农历新年".as_bytes(), s.to_lowercase().as_bytes()); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"FOO\xFFBAR\xE2\x98BAZ"); /// assert_eq!(B(b"foo\xFFbar\xE2\x98baz"), s.to_lowercase().as_bytes()); /// ``` #[cfg(all(feature = "std", feature = "unicode"))] #[inline] fn to_lowercase(&self) -> Vec { let mut buf = vec![]; self.to_lowercase_into(&mut buf); buf } /// Writes the lowercase equivalent of this byte string into the given /// buffer. The buffer is not cleared before written to. /// /// In this case, lowercase is defined according to the `Lowercase` /// Unicode property. /// /// If invalid UTF-8 is seen, or if a character has no lowercase variant, /// then it is written to the given buffer unchanged. /// /// Note that some characters in this byte string may expand into multiple /// characters when changing the case, so the number of bytes written to /// the given byte string may not be equivalent to the number of bytes in /// this byte string. /// /// If you don't need to amortize allocation and instead prefer /// convenience, then use [`to_lowercase`](#method.to_lowercase) instead. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("HELLO Β"); /// /// let mut buf = vec![]; /// s.to_lowercase_into(&mut buf); /// assert_eq!("hello β".as_bytes(), buf.as_bytes()); /// ``` /// /// Scripts without case are not changed: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("农历新年"); /// /// let mut buf = vec![]; /// s.to_lowercase_into(&mut buf); /// assert_eq!("农历新年".as_bytes(), buf.as_bytes()); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"FOO\xFFBAR\xE2\x98BAZ"); /// /// let mut buf = vec![]; /// s.to_lowercase_into(&mut buf); /// assert_eq!(B(b"foo\xFFbar\xE2\x98baz"), buf.as_bytes()); /// ``` #[cfg(all(feature = "std", feature = "unicode"))] #[inline] fn to_lowercase_into(&self, buf: &mut Vec) { // TODO: This is the best we can do given what std exposes I think. // If we roll our own case handling, then we might be able to do this // a bit faster. We shouldn't roll our own case handling unless we // need to, e.g., for doing caseless matching or case folding. // TODO(BUG): This doesn't handle any special casing rules. buf.reserve(self.as_bytes().len()); for (s, e, ch) in self.char_indices() { if ch == '\u{FFFD}' { buf.push_str(&self.as_bytes()[s..e]); } else if ch.is_ascii() { buf.push_char(ch.to_ascii_lowercase()); } else { for upper in ch.to_lowercase() { buf.push_char(upper); } } } } /// Returns a new `Vec` containing the ASCII lowercase equivalent of /// this byte string. /// /// In this case, lowercase is only defined in ASCII letters. Namely, the /// letters `A-Z` are converted to `a-z`. All other bytes remain unchanged. /// In particular, the length of the byte string returned is always /// equivalent to the length of this byte string. /// /// If you'd like to reuse an allocation for performance reasons, then use /// [`make_ascii_lowercase`](#method.make_ascii_lowercase) to perform /// the conversion in place. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("HELLO Β"); /// assert_eq!("hello Β".as_bytes(), s.to_ascii_lowercase().as_bytes()); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"FOO\xFFBAR\xE2\x98BAZ"); /// assert_eq!(s.to_ascii_lowercase(), B(b"foo\xFFbar\xE2\x98baz")); /// ``` #[cfg(feature = "std")] #[inline] fn to_ascii_lowercase(&self) -> Vec { self.as_bytes().to_ascii_lowercase() } /// Convert this byte string to its lowercase ASCII equivalent in place. /// /// In this case, lowercase is only defined in ASCII letters. Namely, the /// letters `A-Z` are converted to `a-z`. All other bytes remain unchanged. /// /// If you don't need to do the conversion in /// place and instead prefer convenience, then use /// [`to_ascii_lowercase`](#method.to_ascii_lowercase) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("HELLO Β"); /// s.make_ascii_lowercase(); /// assert_eq!(s, "hello Β".as_bytes()); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice, ByteVec}; /// /// let mut s = >::from_slice(b"FOO\xFFBAR\xE2\x98BAZ"); /// s.make_ascii_lowercase(); /// assert_eq!(s, B(b"foo\xFFbar\xE2\x98baz")); /// ``` #[inline] fn make_ascii_lowercase(&mut self) { self.as_bytes_mut().make_ascii_lowercase(); } /// Returns a new `Vec` containing the uppercase equivalent of this /// byte string. 
/// /// In this case, uppercase is defined according to the `Uppercase` /// Unicode property. /// /// If invalid UTF-8 is seen, or if a character has no uppercase variant, /// then it is written to the given buffer unchanged. /// /// Note that some characters in this byte string may expand into multiple /// characters when changing the case, so the number of bytes written to /// the given byte string may not be equivalent to the number of bytes in /// this byte string. /// /// If you'd like to reuse an allocation for performance reasons, then use /// [`to_uppercase_into`](#method.to_uppercase_into) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("hello β"); /// assert_eq!(s.to_uppercase(), B("HELLO Β")); /// ``` /// /// Scripts without case are not changed: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("农历新年"); /// assert_eq!(s.to_uppercase(), B("农历新年")); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"foo\xFFbar\xE2\x98baz"); /// assert_eq!(s.to_uppercase(), B(b"FOO\xFFBAR\xE2\x98BAZ")); /// ``` #[cfg(all(feature = "std", feature = "unicode"))] #[inline] fn to_uppercase(&self) -> Vec { let mut buf = vec![]; self.to_uppercase_into(&mut buf); buf } /// Writes the uppercase equivalent of this byte string into the given /// buffer. The buffer is not cleared before written to. /// /// In this case, uppercase is defined according to the `Uppercase` /// Unicode property. /// /// If invalid UTF-8 is seen, or if a character has no uppercase variant, /// then it is written to the given buffer unchanged. /// /// Note that some characters in this byte string may expand into multiple /// characters when changing the case, so the number of bytes written to /// the given byte string may not be equivalent to the number of bytes in /// this byte string. /// /// If you don't need to amortize allocation and instead prefer /// convenience, then use [`to_uppercase`](#method.to_uppercase) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("hello β"); /// /// let mut buf = vec![]; /// s.to_uppercase_into(&mut buf); /// assert_eq!(buf, B("HELLO Β")); /// ``` /// /// Scripts without case are not changed: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("农历新年"); /// /// let mut buf = vec![]; /// s.to_uppercase_into(&mut buf); /// assert_eq!(buf, B("农历新年")); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"foo\xFFbar\xE2\x98baz"); /// /// let mut buf = vec![]; /// s.to_uppercase_into(&mut buf); /// assert_eq!(buf, B(b"FOO\xFFBAR\xE2\x98BAZ")); /// ``` #[cfg(all(feature = "std", feature = "unicode"))] #[inline] fn to_uppercase_into(&self, buf: &mut Vec) { // TODO: This is the best we can do given what std exposes I think. // If we roll our own case handling, then we might be able to do this // a bit faster. We shouldn't roll our own case handling unless we // need to, e.g., for doing caseless matching or case folding. buf.reserve(self.as_bytes().len()); for (s, e, ch) in self.char_indices() { if ch == '\u{FFFD}' { buf.push_str(&self.as_bytes()[s..e]); } else if ch.is_ascii() { buf.push_char(ch.to_ascii_uppercase()); } else { for upper in ch.to_uppercase() { buf.push_char(upper); } } } } /// Returns a new `Vec` containing the ASCII uppercase equivalent of /// this byte string. /// /// In this case, uppercase is only defined in ASCII letters. 
Namely, the /// letters `a-z` are converted to `A-Z`. All other bytes remain unchanged. /// In particular, the length of the byte string returned is always /// equivalent to the length of this byte string. /// /// If you'd like to reuse an allocation for performance reasons, then use /// [`make_ascii_uppercase`](#method.make_ascii_uppercase) to perform /// the conversion in place. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B("hello β"); /// assert_eq!(s.to_ascii_uppercase(), B("HELLO β")); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"foo\xFFbar\xE2\x98baz"); /// assert_eq!(s.to_ascii_uppercase(), B(b"FOO\xFFBAR\xE2\x98BAZ")); /// ``` #[cfg(feature = "std")] #[inline] fn to_ascii_uppercase(&self) -> Vec { self.as_bytes().to_ascii_uppercase() } /// Convert this byte string to its uppercase ASCII equivalent in place. /// /// In this case, uppercase is only defined in ASCII letters. Namely, the /// letters `a-z` are converted to `A-Z`. All other bytes remain unchanged. /// /// If you don't need to do the conversion in /// place and instead prefer convenience, then use /// [`to_ascii_uppercase`](#method.to_ascii_uppercase) instead. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let mut s = >::from("hello β"); /// s.make_ascii_uppercase(); /// assert_eq!(s, B("HELLO β")); /// ``` /// /// Invalid UTF-8 remains as is: /// /// ``` /// use bstr::{B, ByteSlice, ByteVec}; /// /// let mut s = >::from_slice(b"foo\xFFbar\xE2\x98baz"); /// s.make_ascii_uppercase(); /// assert_eq!(s, B(b"FOO\xFFBAR\xE2\x98BAZ")); /// ``` #[inline] fn make_ascii_uppercase(&mut self) { self.as_bytes_mut().make_ascii_uppercase(); } /// Reverse the bytes in this string, in place. /// /// This is not necessarily a well formed operation! For example, if this /// byte string contains valid UTF-8 that isn't ASCII, then reversing the /// string will likely result in invalid UTF-8 and otherwise non-sensical /// content. /// /// Note that this is equivalent to the generic `[u8]::reverse` method. /// This method is provided to permit callers to explicitly differentiate /// between reversing bytes, codepoints and graphemes. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("hello"); /// s.reverse_bytes(); /// assert_eq!(s, "olleh".as_bytes()); /// ``` #[inline] fn reverse_bytes(&mut self) { self.as_bytes_mut().reverse(); } /// Reverse the codepoints in this string, in place. /// /// If this byte string is valid UTF-8, then its reversal by codepoint /// is also guaranteed to be valid UTF-8. /// /// This operation is equivalent to the following, but without allocating: /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("foo☃bar"); /// /// let mut chars: Vec = s.chars().collect(); /// chars.reverse(); /// /// let reversed: String = chars.into_iter().collect(); /// assert_eq!(reversed, "rab☃oof"); /// ``` /// /// Note that this is not necessarily a well formed operation. For example, /// if this byte string contains grapheme clusters with more than one /// codepoint, then those grapheme clusters will not necessarily be /// preserved. If you'd like to preserve grapheme clusters, then use /// [`reverse_graphemes`](#method.reverse_graphemes) instead. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("foo☃bar"); /// s.reverse_chars(); /// assert_eq!(s, "rab☃oof".as_bytes()); /// ``` /// /// This example shows that not all reversals lead to a well formed string. /// For example, in this case, combining marks are used to put accents over /// some letters, and those accent marks must appear after the codepoints /// they modify. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let mut s = >::from("résumé"); /// s.reverse_chars(); /// assert_eq!(s, B(b"\xCC\x81emus\xCC\x81er")); /// ``` /// /// A word of warning: the above example relies on the fact that /// `résumé` is in decomposed normal form, which means there are separate /// codepoints for the accents above `e`. If it is instead in composed /// normal form, then the example works: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let mut s = >::from("résumé"); /// s.reverse_chars(); /// assert_eq!(s, B("émusér")); /// ``` /// /// The point here is to be cautious and not assume that just because /// `reverse_chars` works in one case, that it therefore works in all /// cases. #[inline] fn reverse_chars(&mut self) { let mut i = 0; loop { let (_, size) = utf8::decode(&self.as_bytes()[i..]); if size == 0 { break; } if size > 1 { self.as_bytes_mut()[i..i + size].reverse_bytes(); } i += size; } self.reverse_bytes(); } /// Reverse the graphemes in this string, in place. /// /// If this byte string is valid UTF-8, then its reversal by grapheme /// is also guaranteed to be valid UTF-8. /// /// This operation is equivalent to the following, but without allocating: /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("foo☃bar"); /// /// let mut graphemes: Vec<&str> = s.graphemes().collect(); /// graphemes.reverse(); /// /// let reversed = graphemes.concat(); /// assert_eq!(reversed, "rab☃oof"); /// ``` /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("foo☃bar"); /// s.reverse_graphemes(); /// assert_eq!(s, "rab☃oof".as_bytes()); /// ``` /// /// This example shows how this correctly handles grapheme clusters, /// unlike `reverse_chars`. /// /// ``` /// use bstr::ByteSlice; /// /// let mut s = >::from("résumé"); /// s.reverse_graphemes(); /// assert_eq!(s, "émusér".as_bytes()); /// ``` #[cfg(feature = "unicode")] #[inline] fn reverse_graphemes(&mut self) { use crate::unicode::decode_grapheme; let mut i = 0; loop { let (_, size) = decode_grapheme(&self.as_bytes()[i..]); if size == 0 { break; } if size > 1 { self.as_bytes_mut()[i..i + size].reverse_bytes(); } i += size; } self.reverse_bytes(); } /// Returns true if and only if every byte in this byte string is ASCII. /// /// ASCII is an encoding that defines 128 codepoints. A byte corresponds to /// an ASCII codepoint if and only if it is in the inclusive range /// `[0, 127]`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// assert!(B("abc").is_ascii()); /// assert!(!B("☃βツ").is_ascii()); /// assert!(!B(b"\xFF").is_ascii()); /// ``` #[inline] fn is_ascii(&self) -> bool { ascii::first_non_ascii_byte(self.as_bytes()) == self.as_bytes().len() } /// Returns true if and only if the entire byte string is valid UTF-8. /// /// If you need location information about where a byte string's first /// invalid UTF-8 byte is, then use the [`to_str`](#method.to_str) method. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// assert!(B("abc").is_utf8()); /// assert!(B("☃βツ").is_utf8()); /// // invalid bytes /// assert!(!B(b"abc\xFF").is_utf8()); /// // surrogate encoding /// assert!(!B(b"\xED\xA0\x80").is_utf8()); /// // incomplete sequence /// assert!(!B(b"\xF0\x9D\x9Ca").is_utf8()); /// // overlong sequence /// assert!(!B(b"\xF0\x82\x82\xAC").is_utf8()); /// ``` #[inline] fn is_utf8(&self) -> bool { utf8::validate(self.as_bytes()).is_ok() } /// Returns the last byte in this byte string, if it's non-empty. If this /// byte string is empty, this returns `None`. /// /// Note that this is like the generic `[u8]::last`, except this returns /// the byte by value instead of a reference to the byte. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteSlice; /// /// assert_eq!(Some(b'z'), b"baz".last_byte()); /// assert_eq!(None, b"".last_byte()); /// ``` #[inline] fn last_byte(&self) -> Option<u8> { let bytes = self.as_bytes(); bytes.get(bytes.len().saturating_sub(1)).map(|&b| b) } /// Returns the index of the first non-ASCII byte in this byte string (if /// any such indices exist). Specifically, it returns the index of the /// first byte with a value greater than or equal to `0x80`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{ByteSlice, B}; /// /// assert_eq!(Some(3), b"abc\xff".find_non_ascii_byte()); /// assert_eq!(None, b"abcde".find_non_ascii_byte()); /// assert_eq!(Some(0), B("😀").find_non_ascii_byte()); /// ``` #[inline] fn find_non_ascii_byte(&self) -> Option<usize> { let index = ascii::first_non_ascii_byte(self.as_bytes()); if index == self.as_bytes().len() { None } else { Some(index) } } /// Copies elements from one part of the slice to another part of itself, /// where the parts may be overlapping. /// /// `src` is the range within this byte string to copy from, while `dest` /// is the starting index of the range within this byte string to copy to. /// The length indicated by `src` must be less than or equal to the number /// of bytes from `dest` to the end of the byte string. /// /// # Panics /// /// Panics if either range is out of bounds, or if `src` is too big to fit /// into `dest`, or if the end of `src` is before the start. /// /// # Examples /// /// Copying four bytes within a byte string: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let mut buf = *b"Hello, World!"; /// let s = &mut buf; /// s.copy_within_str(1..5, 8); /// assert_eq!(s, B("Hello, Wello!")); /// ``` #[inline] fn copy_within_str<R>(&mut self, src: R, dest: usize) where R: ops::RangeBounds<usize>, { // TODO: Deprecate this once slice::copy_within stabilizes. let src_start = match src.start_bound() { ops::Bound::Included(&n) => n, ops::Bound::Excluded(&n) => { n.checked_add(1).expect("attempted to index slice beyond max") } ops::Bound::Unbounded => 0, }; let src_end = match src.end_bound() { ops::Bound::Included(&n) => { n.checked_add(1).expect("attempted to index slice beyond max") } ops::Bound::Excluded(&n) => n, ops::Bound::Unbounded => self.as_bytes().len(), }; assert!(src_start <= src_end, "src end is before src start"); assert!(src_end <= self.as_bytes().len(), "src is out of bounds"); let count = src_end - src_start; assert!( dest <= self.as_bytes().len() - count, "dest is out of bounds", ); // SAFETY: This is safe because we use ptr::copy to handle overlapping // copies, and is also safe because we've checked all the bounds above.
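// In particular, the asserts above guarantee that `src_start..src_end` and // `dest..dest + count` both lie within the slice, so the raw pointers below // are valid for `count` bytes of reads and writes.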
// Finally, we are only dealing with u8 data, which is Copy, which // means we can copy without worrying about ownership/destructors. unsafe { ptr::copy( self.as_bytes().get_unchecked(src_start), self.as_bytes_mut().get_unchecked_mut(dest), count, ); } } } /// A single substring searcher fixed to a particular needle. /// /// The purpose of this type is to permit callers to construct a substring /// searcher that can be used to search haystacks without the overhead of /// constructing the searcher in the first place. This is a somewhat niche /// concern when it's necessary to re-use the same needle to search multiple /// different haystacks with as little overhead as possible. In general, using /// [`ByteSlice::find`](trait.ByteSlice.html#method.find) /// or /// [`ByteSlice::find_iter`](trait.ByteSlice.html#method.find_iter) /// is good enough, but `Finder` is useful when you can meaningfully observe /// searcher construction time in a profile. /// /// When the `std` feature is enabled, then this type has an `into_owned` /// version which permits building a `Finder` that is not connected to the /// lifetime of its needle. #[derive(Clone, Debug)] pub struct Finder<'a>(memmem::Finder<'a>); impl<'a> Finder<'a> { /// Create a new finder for the given needle. #[inline] pub fn new<B: ?Sized + AsRef<[u8]>>(needle: &'a B) -> Finder<'a> { Finder(memmem::Finder::new(needle.as_ref())) } /// Convert this finder into its owned variant, such that it no longer /// borrows the needle. /// /// If this is already an owned finder, then this is a no-op. Otherwise, /// this copies the needle. /// /// This is only available when the `std` feature is enabled. #[cfg(feature = "std")] #[inline] pub fn into_owned(self) -> Finder<'static> { Finder(self.0.into_owned()) } /// Returns the needle that this finder searches for. /// /// Note that the lifetime of the needle returned is tied to the lifetime /// of the finder, and may be shorter than the `'a` lifetime. Namely, a /// finder's needle can be either borrowed or owned, so the lifetime of the /// needle returned must necessarily be the shorter of the two. #[inline] pub fn needle(&self) -> &[u8] { self.0.needle() } /// Returns the index of the first occurrence of this needle in the given /// haystack. /// /// The haystack may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the needle and the haystack. That is, this runs /// in `O(needle.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::Finder; /// /// let haystack = "foo bar baz"; /// assert_eq!(Some(0), Finder::new("foo").find(haystack)); /// assert_eq!(Some(4), Finder::new("bar").find(haystack)); /// assert_eq!(None, Finder::new("quux").find(haystack)); /// ``` #[inline] pub fn find<B: AsRef<[u8]>>(&self, haystack: B) -> Option<usize> { self.0.find(haystack.as_ref()) } } /// A single substring reverse searcher fixed to a particular needle. /// /// The purpose of this type is to permit callers to construct a substring /// searcher that can be used to search haystacks without the overhead of /// constructing the searcher in the first place. This is a somewhat niche /// concern when it's necessary to re-use the same needle to search multiple /// different haystacks with as little overhead as possible.
In general, using /// [`ByteSlice::rfind`](trait.ByteSlice.html#method.rfind) /// or /// [`ByteSlice::rfind_iter`](trait.ByteSlice.html#method.rfind_iter) /// is good enough, but `FinderReverse` is useful when you can meaningfully /// observe searcher construction time in a profile. /// /// When the `std` feature is enabled, then this type has an `into_owned` /// version which permits building a `FinderReverse` that is not connected to /// the lifetime of its needle. #[derive(Clone, Debug)] pub struct FinderReverse<'a>(memmem::FinderRev<'a>); impl<'a> FinderReverse<'a> { /// Create a new reverse finder for the given needle. #[inline] pub fn new<B: ?Sized + AsRef<[u8]>>(needle: &'a B) -> FinderReverse<'a> { FinderReverse(memmem::FinderRev::new(needle.as_ref())) } /// Convert this finder into its owned variant, such that it no longer /// borrows the needle. /// /// If this is already an owned finder, then this is a no-op. Otherwise, /// this copies the needle. /// /// This is only available when the `std` feature is enabled. #[cfg(feature = "std")] #[inline] pub fn into_owned(self) -> FinderReverse<'static> { FinderReverse(self.0.into_owned()) } /// Returns the needle that this finder searches for. /// /// Note that the lifetime of the needle returned is tied to the lifetime /// of this finder, and may be shorter than the `'a` lifetime. Namely, /// a finder's needle can be either borrowed or owned, so the lifetime of /// the needle returned must necessarily be the shorter of the two. #[inline] pub fn needle(&self) -> &[u8] { self.0.needle() } /// Returns the index of the last occurrence of this needle in the given /// haystack. /// /// The haystack may be any type that can be cheaply converted into a /// `&[u8]`. This includes, but is not limited to, `&str` and `&[u8]`. /// /// # Complexity /// /// This routine is guaranteed to have worst case linear time complexity /// with respect to both the needle and the haystack. That is, this runs /// in `O(needle.len() + haystack.len())` time. /// /// This routine is also guaranteed to have worst case constant space /// complexity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::FinderReverse; /// /// let haystack = "foo bar baz"; /// assert_eq!(Some(0), FinderReverse::new("foo").rfind(haystack)); /// assert_eq!(Some(4), FinderReverse::new("bar").rfind(haystack)); /// assert_eq!(None, FinderReverse::new("quux").rfind(haystack)); /// ``` #[inline] pub fn rfind<B: AsRef<[u8]>>(&self, haystack: B) -> Option<usize> { self.0.rfind(haystack.as_ref()) } } /// An iterator over non-overlapping substring matches. /// /// Matches are reported by the byte offset at which they begin. /// /// `'a` is the shorter of two lifetimes: the byte string being searched or the /// byte string being looked for. #[derive(Debug)] pub struct Find<'a> { it: memmem::FindIter<'a, 'a>, haystack: &'a [u8], needle: &'a [u8], } impl<'a> Find<'a> { fn new(haystack: &'a [u8], needle: &'a [u8]) -> Find<'a> { Find { it: memmem::find_iter(haystack, needle), haystack, needle } } } impl<'a> Iterator for Find<'a> { type Item = usize; #[inline] fn next(&mut self) -> Option<usize> { self.it.next() } } /// An iterator over non-overlapping substring matches in reverse. /// /// Matches are reported by the byte offset at which they begin. /// /// `'a` is the shorter of two lifetimes: the byte string being searched or the /// byte string being looked for.
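/// /// # Examples /// /// One way to obtain this iterator is via /// [`ByteSlice::rfind_iter`](trait.ByteSlice.html#method.rfind_iter). For /// example, collecting the start offset of each match, from right to left: /// /// ``` /// use bstr::ByteSlice; /// /// let matches: Vec<usize> = b"foo foo foo".rfind_iter("foo").collect(); /// assert_eq!(matches, vec![8, 4, 0]); /// ```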
#[derive(Debug)] pub struct FindReverse<'a> { it: memmem::FindRevIter<'a, 'a>, haystack: &'a [u8], needle: &'a [u8], } impl<'a> FindReverse<'a> { fn new(haystack: &'a [u8], needle: &'a [u8]) -> FindReverse<'a> { FindReverse { it: memmem::rfind_iter(haystack, needle), haystack, needle, } } fn haystack(&self) -> &'a [u8] { self.haystack } fn needle(&self) -> &[u8] { self.needle } } impl<'a> Iterator for FindReverse<'a> { type Item = usize; #[inline] fn next(&mut self) -> Option { self.it.next() } } /// An iterator over the bytes in a byte string. /// /// `'a` is the lifetime of the byte string being traversed. #[derive(Clone, Debug)] pub struct Bytes<'a> { it: slice::Iter<'a, u8>, } impl<'a> Bytes<'a> { /// Views the remaining underlying data as a subslice of the original data. /// This has the same lifetime as the original slice, /// and so the iterator can continue to be used while this exists. #[inline] pub fn as_slice(&self) -> &'a [u8] { self.it.as_slice() } } impl<'a> Iterator for Bytes<'a> { type Item = u8; #[inline] fn next(&mut self) -> Option { self.it.next().map(|&b| b) } #[inline] fn size_hint(&self) -> (usize, Option) { self.it.size_hint() } } impl<'a> DoubleEndedIterator for Bytes<'a> { #[inline] fn next_back(&mut self) -> Option { self.it.next_back().map(|&b| b) } } impl<'a> ExactSizeIterator for Bytes<'a> { #[inline] fn len(&self) -> usize { self.it.len() } } impl<'a> iter::FusedIterator for Bytes<'a> {} /// An iterator over the fields in a byte string, separated by whitespace. /// /// This iterator splits on contiguous runs of whitespace, such that the fields /// in `foo\t\t\n \nbar` are `foo` and `bar`. /// /// `'a` is the lifetime of the byte string being split. #[derive(Debug)] pub struct Fields<'a> { it: FieldsWith<'a, fn(char) -> bool>, } impl<'a> Fields<'a> { fn new(bytes: &'a [u8]) -> Fields<'a> { Fields { it: bytes.fields_with(|ch| ch.is_whitespace()) } } } impl<'a> Iterator for Fields<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { self.it.next() } } /// An iterator over fields in the byte string, separated by a predicate over /// codepoints. /// /// This iterator splits a byte string based on its predicate function such /// that the elements returned are separated by contiguous runs of codepoints /// for which the predicate returns true. /// /// `'a` is the lifetime of the byte string being split, while `F` is the type /// of the predicate, i.e., `FnMut(char) -> bool`. #[derive(Debug)] pub struct FieldsWith<'a, F> { f: F, bytes: &'a [u8], chars: CharIndices<'a>, } impl<'a, F: FnMut(char) -> bool> FieldsWith<'a, F> { fn new(bytes: &'a [u8], f: F) -> FieldsWith<'a, F> { FieldsWith { f, bytes, chars: bytes.char_indices() } } } impl<'a, F: FnMut(char) -> bool> Iterator for FieldsWith<'a, F> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { let (start, mut end); loop { match self.chars.next() { None => return None, Some((s, e, ch)) => { if !(self.f)(ch) { start = s; end = e; break; } } } } while let Some((_, e, ch)) = self.chars.next() { if (self.f)(ch) { break; } end = e; } Some(&self.bytes[start..end]) } } /// An iterator over substrings in a byte string, split by a separator. /// /// `'a` is the lifetime of the byte string being split. #[derive(Debug)] pub struct Split<'a> { finder: Find<'a>, /// The end position of the previous match of our splitter. The element /// we yield corresponds to the substring starting at `last` up to the /// beginning of the next match of the splitter. 
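/// /// This starts at `0` and, after each match, is advanced to the position /// immediately following that match.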
last: usize, /// Only set when iteration is complete. A corner case here is when a /// splitter is matched at the end of the haystack. At that point, we still /// need to yield an empty string following it. done: bool, } impl<'a> Split<'a> { fn new(haystack: &'a [u8], splitter: &'a [u8]) -> Split<'a> { let finder = haystack.find_iter(splitter); Split { finder, last: 0, done: false } } } impl<'a> Iterator for Split<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { let haystack = self.finder.haystack; match self.finder.next() { Some(start) => { let next = &haystack[self.last..start]; self.last = start + self.finder.needle.len(); Some(next) } None => { if self.last >= haystack.len() { if !self.done { self.done = true; Some(b"") } else { None } } else { let s = &haystack[self.last..]; self.last = haystack.len(); self.done = true; Some(s) } } } } } /// An iterator over substrings in a byte string, split by a separator, in /// reverse. /// /// `'a` is the lifetime of the byte string being split, while `F` is the type /// of the predicate, i.e., `FnMut(char) -> bool`. #[derive(Debug)] pub struct SplitReverse<'a> { finder: FindReverse<'a>, /// The end position of the previous match of our splitter. The element /// we yield corresponds to the substring starting at `last` up to the /// beginning of the next match of the splitter. last: usize, /// Only set when iteration is complete. A corner case here is when a /// splitter is matched at the end of the haystack. At that point, we still /// need to yield an empty string following it. done: bool, } impl<'a> SplitReverse<'a> { fn new(haystack: &'a [u8], splitter: &'a [u8]) -> SplitReverse<'a> { let finder = haystack.rfind_iter(splitter); SplitReverse { finder, last: haystack.len(), done: false } } } impl<'a> Iterator for SplitReverse<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { let haystack = self.finder.haystack(); match self.finder.next() { Some(start) => { let nlen = self.finder.needle().len(); let next = &haystack[start + nlen..self.last]; self.last = start; Some(next) } None => { if self.last == 0 { if !self.done { self.done = true; Some(b"") } else { None } } else { let s = &haystack[..self.last]; self.last = 0; self.done = true; Some(s) } } } } } /// An iterator over at most `n` substrings in a byte string, split by a /// separator. /// /// `'a` is the lifetime of the byte string being split, while `F` is the type /// of the predicate, i.e., `FnMut(char) -> bool`. #[derive(Debug)] pub struct SplitN<'a> { split: Split<'a>, limit: usize, count: usize, } impl<'a> SplitN<'a> { fn new( haystack: &'a [u8], splitter: &'a [u8], limit: usize, ) -> SplitN<'a> { let split = haystack.split_str(splitter); SplitN { split, limit, count: 0 } } } impl<'a> Iterator for SplitN<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { self.count += 1; if self.count > self.limit || self.split.done { None } else if self.count == self.limit { Some(&self.split.finder.haystack[self.split.last..]) } else { self.split.next() } } } /// An iterator over at most `n` substrings in a byte string, split by a /// separator, in reverse. /// /// `'a` is the lifetime of the byte string being split, while `F` is the type /// of the predicate, i.e., `FnMut(char) -> bool`. 
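/// /// # Examples /// /// One way to obtain this iterator is via /// [`ByteSlice::rsplitn_str`](trait.ByteSlice.html#method.rsplitn_str). For /// example, splitting from the right at most twice: /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let fields: Vec<&[u8]> = b"a,b,c".rsplitn_str(2, ",").collect(); /// assert_eq!(fields, vec![B("c"), B("a,b")]); /// ```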
#[derive(Debug)] pub struct SplitNReverse<'a> { split: SplitReverse<'a>, limit: usize, count: usize, } impl<'a> SplitNReverse<'a> { fn new( haystack: &'a [u8], splitter: &'a [u8], limit: usize, ) -> SplitNReverse<'a> { let split = haystack.rsplit_str(splitter); SplitNReverse { split, limit, count: 0 } } } impl<'a> Iterator for SplitNReverse<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { self.count += 1; if self.count > self.limit || self.split.done { None } else if self.count == self.limit { Some(&self.split.finder.haystack()[..self.split.last]) } else { self.split.next() } } } /// An iterator over all lines in a byte string, without their terminators. /// /// For this iterator, the only line terminators recognized are `\r\n` and /// `\n`. /// /// `'a` is the lifetime of the byte string being iterated over. pub struct Lines<'a> { it: LinesWithTerminator<'a>, } impl<'a> Lines<'a> { fn new(bytes: &'a [u8]) -> Lines<'a> { Lines { it: LinesWithTerminator::new(bytes) } } } impl<'a> Iterator for Lines<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { let mut line = self.it.next()?; if line.last_byte() == Some(b'\n') { line = &line[..line.len() - 1]; if line.last_byte() == Some(b'\r') { line = &line[..line.len() - 1]; } } Some(line) } } /// An iterator over all lines in a byte string, including their terminators. /// /// For this iterator, the only line terminator recognized is `\n`. (Since /// line terminators are included, this also handles `\r\n` line endings.) /// /// Line terminators are only included if they are present in the original /// byte string. For example, the last line in a byte string may not end with /// a line terminator. /// /// Concatenating all elements yielded by this iterator is guaranteed to yield /// the original byte string. /// /// `'a` is the lifetime of the byte string being iterated over. 
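/// /// # Examples /// /// One way to obtain this iterator is via /// [`ByteSlice::lines_with_terminator`](trait.ByteSlice.html#method.lines_with_terminator): /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let lines: Vec<&[u8]> = /// b"foo\nbar\r\nbaz".lines_with_terminator().collect(); /// assert_eq!(lines, vec![B("foo\n"), B("bar\r\n"), B("baz")]); /// ```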
pub struct LinesWithTerminator<'a> { bytes: &'a [u8], } impl<'a> LinesWithTerminator<'a> { fn new(bytes: &'a [u8]) -> LinesWithTerminator<'a> { LinesWithTerminator { bytes } } } impl<'a> Iterator for LinesWithTerminator<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { match self.bytes.find_byte(b'\n') { None if self.bytes.is_empty() => None, None => { let line = self.bytes; self.bytes = b""; Some(line) } Some(end) => { let line = &self.bytes[..end + 1]; self.bytes = &self.bytes[end + 1..]; Some(line) } } } } #[cfg(test)] mod tests { use crate::ext_slice::{ByteSlice, B}; use crate::tests::LOSSY_TESTS; #[test] fn to_str_lossy() { for (i, &(expected, input)) in LOSSY_TESTS.iter().enumerate() { let got = B(input).to_str_lossy(); assert_eq!( expected.as_bytes(), got.as_bytes(), "to_str_lossy(ith: {:?}, given: {:?})", i, input, ); let mut got = String::new(); B(input).to_str_lossy_into(&mut got); assert_eq!( expected.as_bytes(), got.as_bytes(), "to_str_lossy_into", ); let got = String::from_utf8_lossy(input); assert_eq!(expected.as_bytes(), got.as_bytes(), "std"); } } #[test] #[should_panic] fn copy_within_fail1() { let mut buf = *b"foobar"; let s = &mut buf; s.copy_within_str(0..2, 5); } #[test] #[should_panic] fn copy_within_fail2() { let mut buf = *b"foobar"; let s = &mut buf; s.copy_within_str(3..2, 0); } #[test] #[should_panic] fn copy_within_fail3() { let mut buf = *b"foobar"; let s = &mut buf; s.copy_within_str(5..7, 0); } #[test] #[should_panic] fn copy_within_fail4() { let mut buf = *b"foobar"; let s = &mut buf; s.copy_within_str(0..1, 6); } } bstr-0.2.17/src/ext_vec.rs000064400000000000000000000755470072674642500135310ustar 00000000000000use std::borrow::Cow; use std::error; use std::ffi::{OsStr, OsString}; use std::fmt; use std::iter; use std::ops; use std::path::{Path, PathBuf}; use std::ptr; use std::str; use std::vec; use crate::ext_slice::ByteSlice; use crate::utf8::{self, Utf8Error}; /// Concatenate the elements given by the iterator together into a single /// `Vec`. /// /// The elements may be any type that can be cheaply converted into an `&[u8]`. /// This includes, but is not limited to, `&str`, `&BStr` and `&[u8]` itself. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr; /// /// let s = bstr::concat(&["foo", "bar", "baz"]); /// assert_eq!(s, "foobarbaz".as_bytes()); /// ``` #[inline] pub fn concat(elements: I) -> Vec where T: AsRef<[u8]>, I: IntoIterator, { let mut dest = vec![]; for element in elements { dest.push_str(element); } dest } /// Join the elements given by the iterator with the given separator into a /// single `Vec`. /// /// Both the separator and the elements may be any type that can be cheaply /// converted into an `&[u8]`. This includes, but is not limited to, /// `&str`, `&BStr` and `&[u8]` itself. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr; /// /// let s = bstr::join(",", &["foo", "bar", "baz"]); /// assert_eq!(s, "foo,bar,baz".as_bytes()); /// ``` #[inline] pub fn join(separator: B, elements: I) -> Vec where B: AsRef<[u8]>, T: AsRef<[u8]>, I: IntoIterator, { let mut it = elements.into_iter(); let mut dest = vec![]; match it.next() { None => return dest, Some(first) => { dest.push_str(first); } } for element in it { dest.push_str(&separator); dest.push_str(element); } dest } impl ByteVec for Vec { #[inline] fn as_vec(&self) -> &Vec { self } #[inline] fn as_vec_mut(&mut self) -> &mut Vec { self } #[inline] fn into_vec(self) -> Vec { self } } /// Ensure that callers cannot implement `ByteSlice` by making an /// umplementable trait its super trait. pub trait Sealed {} impl Sealed for Vec {} /// A trait that extends `Vec` with string oriented methods. /// /// Note that when using the constructor methods, such as /// `ByteVec::from_slice`, one should actually call them using the concrete /// type. For example: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let s = Vec::from_slice(b"abc"); // NOT ByteVec::from_slice("...") /// assert_eq!(s, B("abc")); /// ``` pub trait ByteVec: Sealed { /// A method for accessing the raw vector bytes of this type. This is /// always a no-op and callers shouldn't care about it. This only exists /// for making the extension trait work. #[doc(hidden)] fn as_vec(&self) -> &Vec; /// A method for accessing the raw vector bytes of this type, mutably. This /// is always a no-op and callers shouldn't care about it. This only exists /// for making the extension trait work. #[doc(hidden)] fn as_vec_mut(&mut self) -> &mut Vec; /// A method for consuming ownership of this vector. This is always a no-op /// and callers shouldn't care about it. This only exists for making the /// extension trait work. #[doc(hidden)] fn into_vec(self) -> Vec where Self: Sized; /// Create a new owned byte string from the given byte slice. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let s = Vec::from_slice(b"abc"); /// assert_eq!(s, B("abc")); /// ``` #[inline] fn from_slice>(bytes: B) -> Vec { bytes.as_ref().to_vec() } /// Create a new byte string from an owned OS string. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns the original OS string if it is not valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::ffi::OsString; /// /// use bstr::{B, ByteVec}; /// /// let os_str = OsString::from("foo"); /// let bs = Vec::from_os_string(os_str).expect("valid UTF-8"); /// assert_eq!(bs, B("foo")); /// ``` #[inline] fn from_os_string(os_str: OsString) -> Result, OsString> { #[cfg(unix)] #[inline] fn imp(os_str: OsString) -> Result, OsString> { use std::os::unix::ffi::OsStringExt; Ok(Vec::from(os_str.into_vec())) } #[cfg(not(unix))] #[inline] fn imp(os_str: OsString) -> Result, OsString> { os_str.into_string().map(Vec::from) } imp(os_str) } /// Lossily create a new byte string from an OS string slice. /// /// On Unix, this always succeeds, is zero cost and always returns a slice. /// On non-Unix systems, this does a UTF-8 check. If the given OS string /// slice is not valid UTF-8, then it is lossily decoded into valid UTF-8 /// (with invalid bytes replaced by the Unicode replacement codepoint). 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::ffi::OsStr; /// /// use bstr::{B, ByteVec}; /// /// let os_str = OsStr::new("foo"); /// let bs = Vec::from_os_str_lossy(os_str); /// assert_eq!(bs, B("foo")); /// ``` #[inline] fn from_os_str_lossy<'a>(os_str: &'a OsStr) -> Cow<'a, [u8]> { #[cfg(unix)] #[inline] fn imp<'a>(os_str: &'a OsStr) -> Cow<'a, [u8]> { use std::os::unix::ffi::OsStrExt; Cow::Borrowed(os_str.as_bytes()) } #[cfg(not(unix))] #[inline] fn imp<'a>(os_str: &'a OsStr) -> Cow<'a, [u8]> { match os_str.to_string_lossy() { Cow::Borrowed(x) => Cow::Borrowed(x.as_bytes()), Cow::Owned(x) => Cow::Owned(Vec::from(x)), } } imp(os_str) } /// Create a new byte string from an owned file path. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns the original path if it is not valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::path::PathBuf; /// /// use bstr::{B, ByteVec}; /// /// let path = PathBuf::from("foo"); /// let bs = Vec::from_path_buf(path).expect("must be valid UTF-8"); /// assert_eq!(bs, B("foo")); /// ``` #[inline] fn from_path_buf(path: PathBuf) -> Result, PathBuf> { Vec::from_os_string(path.into_os_string()).map_err(PathBuf::from) } /// Lossily create a new byte string from a file path. /// /// On Unix, this always succeeds, is zero cost and always returns a slice. /// On non-Unix systems, this does a UTF-8 check. If the given path is not /// valid UTF-8, then it is lossily decoded into valid UTF-8 (with invalid /// bytes replaced by the Unicode replacement codepoint). /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::path::Path; /// /// use bstr::{B, ByteVec}; /// /// let path = Path::new("foo"); /// let bs = Vec::from_path_lossy(path); /// assert_eq!(bs, B("foo")); /// ``` #[inline] fn from_path_lossy<'a>(path: &'a Path) -> Cow<'a, [u8]> { Vec::from_os_str_lossy(path.as_os_str()) } /// Appends the given byte to the end of this byte string. /// /// Note that this is equivalent to the generic `Vec::push` method. This /// method is provided to permit callers to explicitly differentiate /// between pushing bytes, codepoints and strings. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = >::from("abc"); /// s.push_byte(b'\xE2'); /// s.push_byte(b'\x98'); /// s.push_byte(b'\x83'); /// assert_eq!(s, "abc☃".as_bytes()); /// ``` #[inline] fn push_byte(&mut self, byte: u8) { self.as_vec_mut().push(byte); } /// Appends the given `char` to the end of this byte string. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = >::from("abc"); /// s.push_char('1'); /// s.push_char('2'); /// s.push_char('3'); /// assert_eq!(s, "abc123".as_bytes()); /// ``` #[inline] fn push_char(&mut self, ch: char) { if ch.len_utf8() == 1 { self.push_byte(ch as u8); return; } self.as_vec_mut() .extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()); } /// Appends the given slice to the end of this byte string. This accepts /// any type that be converted to a `&[u8]`. This includes, but is not /// limited to, `&str`, `&BStr`, and of course, `&[u8]` itself. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = >::from("abc"); /// s.push_str(b"123"); /// assert_eq!(s, "abc123".as_bytes()); /// ``` #[inline] fn push_str>(&mut self, bytes: B) { self.as_vec_mut().extend_from_slice(bytes.as_ref()); } /// Converts a `Vec` into a `String` if and only if this byte string is /// valid UTF-8. 
/// /// If it is not valid UTF-8, then a /// [`FromUtf8Error`](struct.FromUtf8Error.html) /// is returned. (This error can be used to examine why UTF-8 validation /// failed, or to regain the original byte string.) /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// # fn example() -> Result<(), Box> { /// let bytes = Vec::from("hello"); /// let string = bytes.into_string()?; /// /// assert_eq!("hello", string); /// # Ok(()) }; example().unwrap() /// ``` /// /// If this byte string is not valid UTF-8, then an error will be returned. /// That error can then be used to inspect the location at which invalid /// UTF-8 was found, or to regain the original byte string: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let bytes = Vec::from_slice(b"foo\xFFbar"); /// let err = bytes.into_string().unwrap_err(); /// /// assert_eq!(err.utf8_error().valid_up_to(), 3); /// assert_eq!(err.utf8_error().error_len(), Some(1)); /// /// // At no point in this example is an allocation performed. /// let bytes = Vec::from(err.into_vec()); /// assert_eq!(bytes, B(b"foo\xFFbar")); /// ``` #[inline] fn into_string(self) -> Result where Self: Sized, { match utf8::validate(self.as_vec()) { Err(err) => Err(FromUtf8Error { original: self.into_vec(), err }), Ok(()) => { // SAFETY: This is safe because of the guarantees provided by // utf8::validate. unsafe { Ok(self.into_string_unchecked()) } } } } /// Lossily converts a `Vec` into a `String`. If this byte string /// contains invalid UTF-8, then the invalid bytes are replaced with the /// Unicode replacement codepoint. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let bytes = Vec::from_slice(b"foo\xFFbar"); /// let string = bytes.into_string_lossy(); /// assert_eq!(string, "foo\u{FFFD}bar"); /// ``` #[inline] fn into_string_lossy(self) -> String where Self: Sized, { match self.as_vec().to_str_lossy() { Cow::Borrowed(_) => { // SAFETY: to_str_lossy() returning a Cow::Borrowed guarantees // the entire string is valid utf8. unsafe { self.into_string_unchecked() } } Cow::Owned(s) => s, } } /// Unsafely convert this byte string into a `String`, without checking for /// valid UTF-8. /// /// # Safety /// /// Callers *must* ensure that this byte string is valid UTF-8 before /// calling this method. Converting a byte string into a `String` that is /// not valid UTF-8 is considered undefined behavior. /// /// This routine is useful in performance sensitive contexts where the /// UTF-8 validity of the byte string is already known and it is /// undesirable to pay the cost of an additional UTF-8 validation check /// that [`into_string`](#method.into_string) performs. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// // SAFETY: This is safe because string literals are guaranteed to be /// // valid UTF-8 by the Rust compiler. /// let s = unsafe { Vec::from("☃βツ").into_string_unchecked() }; /// assert_eq!("☃βツ", s); /// ``` #[inline] unsafe fn into_string_unchecked(self) -> String where Self: Sized, { String::from_utf8_unchecked(self.into_vec()) } /// Converts this byte string into an OS string, in place. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns the original byte string if it is not valid UTF-8. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::ffi::OsStr; /// /// use bstr::ByteVec; /// /// let bs = Vec::from("foo"); /// let os_str = bs.into_os_string().expect("should be valid UTF-8"); /// assert_eq!(os_str, OsStr::new("foo")); /// ``` #[inline] fn into_os_string(self) -> Result> where Self: Sized, { #[cfg(unix)] #[inline] fn imp(v: Vec) -> Result> { use std::os::unix::ffi::OsStringExt; Ok(OsString::from_vec(v)) } #[cfg(not(unix))] #[inline] fn imp(v: Vec) -> Result> { match v.into_string() { Ok(s) => Ok(OsString::from(s)), Err(err) => Err(err.into_vec()), } } imp(self.into_vec()) } /// Lossily converts this byte string into an OS string, in place. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this will perform a UTF-8 check and lossily convert this byte string /// into valid UTF-8 using the Unicode replacement codepoint. /// /// Note that this can prevent the correct roundtripping of file paths on /// non-Unix systems such as Windows, where file paths are an arbitrary /// sequence of 16-bit integers. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let bs = Vec::from_slice(b"foo\xFFbar"); /// let os_str = bs.into_os_string_lossy(); /// assert_eq!(os_str.to_string_lossy(), "foo\u{FFFD}bar"); /// ``` #[inline] fn into_os_string_lossy(self) -> OsString where Self: Sized, { #[cfg(unix)] #[inline] fn imp(v: Vec) -> OsString { use std::os::unix::ffi::OsStringExt; OsString::from_vec(v) } #[cfg(not(unix))] #[inline] fn imp(v: Vec) -> OsString { OsString::from(v.into_string_lossy()) } imp(self.into_vec()) } /// Converts this byte string into an owned file path, in place. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this returns the original byte string if it is not valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let bs = Vec::from("foo"); /// let path = bs.into_path_buf().expect("should be valid UTF-8"); /// assert_eq!(path.as_os_str(), "foo"); /// ``` #[inline] fn into_path_buf(self) -> Result> where Self: Sized, { self.into_os_string().map(PathBuf::from) } /// Lossily converts this byte string into an owned file path, in place. /// /// On Unix, this always succeeds and is zero cost. On non-Unix systems, /// this will perform a UTF-8 check and lossily convert this byte string /// into valid UTF-8 using the Unicode replacement codepoint. /// /// Note that this can prevent the correct roundtripping of file paths on /// non-Unix systems such as Windows, where file paths are an arbitrary /// sequence of 16-bit integers. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let bs = Vec::from_slice(b"foo\xFFbar"); /// let path = bs.into_path_buf_lossy(); /// assert_eq!(path.to_string_lossy(), "foo\u{FFFD}bar"); /// ``` #[inline] fn into_path_buf_lossy(self) -> PathBuf where Self: Sized, { PathBuf::from(self.into_os_string_lossy()) } /// Removes the last byte from this `Vec` and returns it. /// /// If this byte string is empty, then `None` is returned. /// /// If the last codepoint in this byte string is not ASCII, then removing /// the last byte could make this byte string contain invalid UTF-8. /// /// Note that this is equivalent to the generic `Vec::pop` method. This /// method is provided to permit callers to explicitly differentiate /// between popping bytes and codepoints. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foo"); /// assert_eq!(s.pop_byte(), Some(b'o')); /// assert_eq!(s.pop_byte(), Some(b'o')); /// assert_eq!(s.pop_byte(), Some(b'f')); /// assert_eq!(s.pop_byte(), None); /// ``` #[inline] fn pop_byte(&mut self) -> Option { self.as_vec_mut().pop() } /// Removes the last codepoint from this `Vec` and returns it. /// /// If this byte string is empty, then `None` is returned. If the last /// bytes of this byte string do not correspond to a valid UTF-8 code unit /// sequence, then the Unicode replacement codepoint is yielded instead in /// accordance with the /// [replacement codepoint substitution policy](index.html#handling-of-invalid-utf8-8). /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foo"); /// assert_eq!(s.pop_char(), Some('o')); /// assert_eq!(s.pop_char(), Some('o')); /// assert_eq!(s.pop_char(), Some('f')); /// assert_eq!(s.pop_char(), None); /// ``` /// /// This shows the replacement codepoint substitution policy. Note that /// the first pop yields a replacement codepoint but actually removes two /// bytes. This is in contrast with subsequent pops when encountering /// `\xFF` since `\xFF` is never a valid prefix for any valid UTF-8 /// code unit sequence. /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from_slice(b"f\xFF\xFF\xFFoo\xE2\x98"); /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); /// assert_eq!(s.pop_char(), Some('o')); /// assert_eq!(s.pop_char(), Some('o')); /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); /// assert_eq!(s.pop_char(), Some('\u{FFFD}')); /// assert_eq!(s.pop_char(), Some('f')); /// assert_eq!(s.pop_char(), None); /// ``` #[inline] fn pop_char(&mut self) -> Option { let (ch, size) = utf8::decode_last_lossy(self.as_vec()); if size == 0 { return None; } let new_len = self.as_vec().len() - size; self.as_vec_mut().truncate(new_len); Some(ch) } /// Removes a `char` from this `Vec` at the given byte position and /// returns it. /// /// If the bytes at the given position do not lead to a valid UTF-8 code /// unit sequence, then a /// [replacement codepoint is returned instead](index.html#handling-of-invalid-utf8-8). /// /// # Panics /// /// Panics if `at` is larger than or equal to this byte string's length. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foo☃bar"); /// assert_eq!(s.remove_char(3), '☃'); /// assert_eq!(s, b"foobar"); /// ``` /// /// This example shows how the Unicode replacement codepoint policy is /// used: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from_slice(b"foo\xFFbar"); /// assert_eq!(s.remove_char(3), '\u{FFFD}'); /// assert_eq!(s, b"foobar"); /// ``` #[inline] fn remove_char(&mut self, at: usize) -> char { let (ch, size) = utf8::decode_lossy(&self.as_vec()[at..]); assert!( size > 0, "expected {} to be less than {}", at, self.as_vec().len(), ); self.as_vec_mut().drain(at..at + size); ch } /// Inserts the given codepoint into this `Vec` at a particular byte /// position. /// /// This is an `O(n)` operation as it may copy a number of elements in this /// byte string proportional to its length. /// /// # Panics /// /// Panics if `at` is larger than the byte string's length. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foobar"); /// s.insert_char(3, '☃'); /// assert_eq!(s, "foo☃bar".as_bytes()); /// ``` #[inline] fn insert_char(&mut self, at: usize, ch: char) { self.insert_str(at, ch.encode_utf8(&mut [0; 4]).as_bytes()); } /// Inserts the given byte string into this byte string at a particular /// byte position. /// /// This is an `O(n)` operation as it may copy a number of elements in this /// byte string proportional to its length. /// /// The given byte string may be any type that can be cheaply converted /// into a `&[u8]`. This includes, but is not limited to, `&str` and /// `&[u8]`. /// /// # Panics /// /// Panics if `at` is larger than the byte string's length. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foobar"); /// s.insert_str(3, "☃☃☃"); /// assert_eq!(s, "foo☃☃☃bar".as_bytes()); /// ``` #[inline] fn insert_str>(&mut self, at: usize, bytes: B) { let bytes = bytes.as_ref(); let len = self.as_vec().len(); assert!(at <= len, "expected {} to be <= {}", at, len); // SAFETY: We'd like to efficiently splice in the given bytes into // this byte string. Since we are only working with `u8` elements here, // we only need to consider whether our bounds are correct and whether // our byte string has enough space. self.as_vec_mut().reserve(bytes.len()); unsafe { // Shift bytes after `at` over by the length of `bytes` to make // room for it. This requires referencing two regions of memory // that may overlap, so we use ptr::copy. ptr::copy( self.as_vec().as_ptr().add(at), self.as_vec_mut().as_mut_ptr().add(at + bytes.len()), len - at, ); // Now copy the bytes given into the room we made above. In this // case, we know that the given bytes cannot possibly overlap // with this byte string since we have a mutable borrow of the // latter. Thus, we can use a nonoverlapping copy. ptr::copy_nonoverlapping( bytes.as_ptr(), self.as_vec_mut().as_mut_ptr().add(at), bytes.len(), ); self.as_vec_mut().set_len(len + bytes.len()); } } /// Removes the specified range in this byte string and replaces it with /// the given bytes. The given bytes do not need to have the same length /// as the range provided. /// /// # Panics /// /// Panics if the given range is invalid. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foobar"); /// s.replace_range(2..4, "xxxxx"); /// assert_eq!(s, "foxxxxxar".as_bytes()); /// ``` #[inline] fn replace_range(&mut self, range: R, replace_with: B) where R: ops::RangeBounds, B: AsRef<[u8]>, { self.as_vec_mut().splice(range, replace_with.as_ref().iter().cloned()); } /// Creates a draining iterator that removes the specified range in this /// `Vec` and yields each of the removed bytes. /// /// Note that the elements specified by the given range are removed /// regardless of whether the returned iterator is fully exhausted. /// /// Also note that is is unspecified how many bytes are removed from the /// `Vec` if the `DrainBytes` iterator is leaked. /// /// # Panics /// /// Panics if the given range is not valid. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foobar"); /// { /// let mut drainer = s.drain_bytes(2..4); /// assert_eq!(drainer.next(), Some(b'o')); /// assert_eq!(drainer.next(), Some(b'b')); /// assert_eq!(drainer.next(), None); /// } /// assert_eq!(s, "foar".as_bytes()); /// ``` #[inline] fn drain_bytes(&mut self, range: R) -> DrainBytes<'_> where R: ops::RangeBounds, { DrainBytes { it: self.as_vec_mut().drain(range) } } } /// A draining byte oriented iterator for `Vec`. /// /// This iterator is created by /// [`ByteVec::drain_bytes`](trait.ByteVec.html#method.drain_bytes). /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::ByteVec; /// /// let mut s = Vec::from("foobar"); /// { /// let mut drainer = s.drain_bytes(2..4); /// assert_eq!(drainer.next(), Some(b'o')); /// assert_eq!(drainer.next(), Some(b'b')); /// assert_eq!(drainer.next(), None); /// } /// assert_eq!(s, "foar".as_bytes()); /// ``` #[derive(Debug)] pub struct DrainBytes<'a> { it: vec::Drain<'a, u8>, } impl<'a> iter::FusedIterator for DrainBytes<'a> {} impl<'a> Iterator for DrainBytes<'a> { type Item = u8; #[inline] fn next(&mut self) -> Option { self.it.next() } } impl<'a> DoubleEndedIterator for DrainBytes<'a> { #[inline] fn next_back(&mut self) -> Option { self.it.next_back() } } impl<'a> ExactSizeIterator for DrainBytes<'a> { #[inline] fn len(&self) -> usize { self.it.len() } } /// An error that may occur when converting a `Vec` to a `String`. /// /// This error includes the original `Vec` that failed to convert to a /// `String`. This permits callers to recover the allocation used even if it /// it not valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let bytes = Vec::from_slice(b"foo\xFFbar"); /// let err = bytes.into_string().unwrap_err(); /// /// assert_eq!(err.utf8_error().valid_up_to(), 3); /// assert_eq!(err.utf8_error().error_len(), Some(1)); /// /// // At no point in this example is an allocation performed. /// let bytes = Vec::from(err.into_vec()); /// assert_eq!(bytes, B(b"foo\xFFbar")); /// ``` #[derive(Debug, Eq, PartialEq)] pub struct FromUtf8Error { original: Vec, err: Utf8Error, } impl FromUtf8Error { /// Return the original bytes as a slice that failed to convert to a /// `String`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let bytes = Vec::from_slice(b"foo\xFFbar"); /// let err = bytes.into_string().unwrap_err(); /// /// // At no point in this example is an allocation performed. /// assert_eq!(err.as_bytes(), B(b"foo\xFFbar")); /// ``` #[inline] pub fn as_bytes(&self) -> &[u8] { &self.original } /// Consume this error and return the original byte string that failed to /// convert to a `String`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let bytes = Vec::from_slice(b"foo\xFFbar"); /// let err = bytes.into_string().unwrap_err(); /// let original = err.into_vec(); /// /// // At no point in this example is an allocation performed. /// assert_eq!(original, B(b"foo\xFFbar")); /// ``` #[inline] pub fn into_vec(self) -> Vec { self.original } /// Return the underlying UTF-8 error that occurred. This error provides /// information on the nature and location of the invalid UTF-8 detected. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::{B, ByteVec}; /// /// let bytes = Vec::from_slice(b"foo\xFFbar"); /// let err = bytes.into_string().unwrap_err(); /// /// assert_eq!(err.utf8_error().valid_up_to(), 3); /// assert_eq!(err.utf8_error().error_len(), Some(1)); /// ``` #[inline] pub fn utf8_error(&self) -> &Utf8Error { &self.err } } impl error::Error for FromUtf8Error { #[inline] fn description(&self) -> &str { "invalid UTF-8 vector" } } impl fmt::Display for FromUtf8Error { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.err) } } #[cfg(test)] mod tests { use crate::ext_vec::ByteVec; #[test] fn insert() { let mut s = vec![]; s.insert_str(0, "foo"); assert_eq!(s, "foo".as_bytes()); let mut s = Vec::from("a"); s.insert_str(0, "foo"); assert_eq!(s, "fooa".as_bytes()); let mut s = Vec::from("a"); s.insert_str(1, "foo"); assert_eq!(s, "afoo".as_bytes()); let mut s = Vec::from("foobar"); s.insert_str(3, "quux"); assert_eq!(s, "fooquuxbar".as_bytes()); let mut s = Vec::from("foobar"); s.insert_str(3, "x"); assert_eq!(s, "fooxbar".as_bytes()); let mut s = Vec::from("foobar"); s.insert_str(0, "x"); assert_eq!(s, "xfoobar".as_bytes()); let mut s = Vec::from("foobar"); s.insert_str(6, "x"); assert_eq!(s, "foobarx".as_bytes()); let mut s = Vec::from("foobar"); s.insert_str(3, "quuxbazquux"); assert_eq!(s, "fooquuxbazquuxbar".as_bytes()); } #[test] #[should_panic] fn insert_fail1() { let mut s = vec![]; s.insert_str(1, "foo"); } #[test] #[should_panic] fn insert_fail2() { let mut s = Vec::from("a"); s.insert_str(2, "foo"); } #[test] #[should_panic] fn insert_fail3() { let mut s = Vec::from("foobar"); s.insert_str(7, "foo"); } } bstr-0.2.17/src/impls.rs000064400000000000000000000645420072674642500132110ustar 00000000000000macro_rules! impl_partial_eq { ($lhs:ty, $rhs:ty) => { impl<'a, 'b> PartialEq<$rhs> for $lhs { #[inline] fn eq(&self, other: &$rhs) -> bool { let other: &[u8] = other.as_ref(); PartialEq::eq(self.as_bytes(), other) } } impl<'a, 'b> PartialEq<$lhs> for $rhs { #[inline] fn eq(&self, other: &$lhs) -> bool { let this: &[u8] = self.as_ref(); PartialEq::eq(this, other.as_bytes()) } } }; } #[cfg(feature = "std")] macro_rules! impl_partial_eq_cow { ($lhs:ty, $rhs:ty) => { impl<'a, 'b> PartialEq<$rhs> for $lhs { #[inline] fn eq(&self, other: &$rhs) -> bool { let other: &[u8] = (&**other).as_ref(); PartialEq::eq(self.as_bytes(), other) } } impl<'a, 'b> PartialEq<$lhs> for $rhs { #[inline] fn eq(&self, other: &$lhs) -> bool { let this: &[u8] = (&**other).as_ref(); PartialEq::eq(this, self.as_bytes()) } } }; } macro_rules! 
impl_partial_ord { ($lhs:ty, $rhs:ty) => { impl<'a, 'b> PartialOrd<$rhs> for $lhs { #[inline] fn partial_cmp(&self, other: &$rhs) -> Option { let other: &[u8] = other.as_ref(); PartialOrd::partial_cmp(self.as_bytes(), other) } } impl<'a, 'b> PartialOrd<$lhs> for $rhs { #[inline] fn partial_cmp(&self, other: &$lhs) -> Option { let this: &[u8] = self.as_ref(); PartialOrd::partial_cmp(this, other.as_bytes()) } } }; } #[cfg(feature = "std")] mod bstring { use std::borrow::{Borrow, Cow, ToOwned}; use std::cmp::Ordering; use std::fmt; use std::iter::FromIterator; use std::ops; use crate::bstr::BStr; use crate::bstring::BString; use crate::ext_vec::ByteVec; impl fmt::Display for BString { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(self.as_bstr(), f) } } impl fmt::Debug for BString { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(self.as_bstr(), f) } } impl ops::Deref for BString { type Target = Vec; #[inline] fn deref(&self) -> &Vec { &self.bytes } } impl ops::DerefMut for BString { #[inline] fn deref_mut(&mut self) -> &mut Vec { &mut self.bytes } } impl AsRef<[u8]> for BString { #[inline] fn as_ref(&self) -> &[u8] { &self.bytes } } impl AsRef for BString { #[inline] fn as_ref(&self) -> &BStr { self.as_bstr() } } impl AsMut<[u8]> for BString { #[inline] fn as_mut(&mut self) -> &mut [u8] { &mut self.bytes } } impl AsMut for BString { #[inline] fn as_mut(&mut self) -> &mut BStr { self.as_mut_bstr() } } impl Borrow for BString { #[inline] fn borrow(&self) -> &BStr { self.as_bstr() } } impl ToOwned for BStr { type Owned = BString; #[inline] fn to_owned(&self) -> BString { BString::from(self) } } impl Default for BString { fn default() -> BString { BString::from(vec![]) } } impl<'a> From<&'a [u8]> for BString { #[inline] fn from(s: &'a [u8]) -> BString { BString::from(s.to_vec()) } } impl From> for BString { #[inline] fn from(s: Vec) -> BString { BString { bytes: s } } } impl From for Vec { #[inline] fn from(s: BString) -> Vec { s.bytes } } impl<'a> From<&'a str> for BString { #[inline] fn from(s: &'a str) -> BString { BString::from(s.as_bytes().to_vec()) } } impl From for BString { #[inline] fn from(s: String) -> BString { BString::from(s.into_bytes()) } } impl<'a> From<&'a BStr> for BString { #[inline] fn from(s: &'a BStr) -> BString { BString::from(s.bytes.to_vec()) } } impl<'a> From for Cow<'a, BStr> { #[inline] fn from(s: BString) -> Cow<'a, BStr> { Cow::Owned(s) } } impl FromIterator for BString { #[inline] fn from_iter>(iter: T) -> BString { BString::from(iter.into_iter().collect::()) } } impl FromIterator for BString { #[inline] fn from_iter>(iter: T) -> BString { BString::from(iter.into_iter().collect::>()) } } impl<'a> FromIterator<&'a str> for BString { #[inline] fn from_iter>(iter: T) -> BString { let mut buf = vec![]; for b in iter { buf.push_str(b); } BString::from(buf) } } impl<'a> FromIterator<&'a [u8]> for BString { #[inline] fn from_iter>(iter: T) -> BString { let mut buf = vec![]; for b in iter { buf.push_str(b); } BString::from(buf) } } impl<'a> FromIterator<&'a BStr> for BString { #[inline] fn from_iter>(iter: T) -> BString { let mut buf = vec![]; for b in iter { buf.push_str(b); } BString::from(buf) } } impl FromIterator for BString { #[inline] fn from_iter>(iter: T) -> BString { let mut buf = vec![]; for b in iter { buf.push_str(b); } BString::from(buf) } } impl Eq for BString {} impl PartialEq for BString { #[inline] fn eq(&self, other: &BString) -> bool { &self[..] == &other[..] 
} } impl_partial_eq!(BString, Vec); impl_partial_eq!(BString, [u8]); impl_partial_eq!(BString, &'a [u8]); impl_partial_eq!(BString, String); impl_partial_eq!(BString, str); impl_partial_eq!(BString, &'a str); impl_partial_eq!(BString, BStr); impl_partial_eq!(BString, &'a BStr); impl PartialOrd for BString { #[inline] fn partial_cmp(&self, other: &BString) -> Option { PartialOrd::partial_cmp(&self.bytes, &other.bytes) } } impl Ord for BString { #[inline] fn cmp(&self, other: &BString) -> Ordering { self.partial_cmp(other).unwrap() } } impl_partial_ord!(BString, Vec); impl_partial_ord!(BString, [u8]); impl_partial_ord!(BString, &'a [u8]); impl_partial_ord!(BString, String); impl_partial_ord!(BString, str); impl_partial_ord!(BString, &'a str); impl_partial_ord!(BString, BStr); impl_partial_ord!(BString, &'a BStr); } mod bstr { #[cfg(feature = "std")] use std::borrow::Cow; use core::cmp::Ordering; use core::fmt; use core::ops; use crate::bstr::BStr; use crate::ext_slice::ByteSlice; impl fmt::Display for BStr { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { /// Write the given bstr (lossily) to the given formatter. fn write_bstr( f: &mut fmt::Formatter<'_>, bstr: &BStr, ) -> Result<(), fmt::Error> { for chunk in bstr.utf8_chunks() { f.write_str(chunk.valid())?; if !chunk.invalid().is_empty() { f.write_str("\u{FFFD}")?; } } Ok(()) } /// Write 'num' fill characters to the given formatter. fn write_pads( f: &mut fmt::Formatter<'_>, num: usize, ) -> fmt::Result { let fill = f.fill(); for _ in 0..num { f.write_fmt(format_args!("{}", fill))?; } Ok(()) } if let Some(align) = f.align() { let width = f.width().unwrap_or(0); let nchars = self.chars().count(); let remaining_pads = width.saturating_sub(nchars); match align { fmt::Alignment::Left => { write_bstr(f, self)?; write_pads(f, remaining_pads)?; } fmt::Alignment::Right => { write_pads(f, remaining_pads)?; write_bstr(f, self)?; } fmt::Alignment::Center => { let half = remaining_pads / 2; let second_half = if remaining_pads % 2 == 0 { half } else { half + 1 }; write_pads(f, half)?; write_bstr(f, self)?; write_pads(f, second_half)?; } } Ok(()) } else { write_bstr(f, self)?; Ok(()) } } } impl fmt::Debug for BStr { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\"")?; for (s, e, ch) in self.char_indices() { match ch { '\0' => write!(f, "\\0")?, '\u{FFFD}' => { let bytes = self[s..e].as_bytes(); if bytes == b"\xEF\xBF\xBD" { write!(f, "{}", ch.escape_debug())?; } else { for &b in self[s..e].as_bytes() { write!(f, r"\x{:02X}", b)?; } } } // ASCII control characters except \0, \n, \r, \t '\x01'..='\x08' | '\x0b' | '\x0c' | '\x0e'..='\x19' | '\x7f' => { write!(f, "\\x{:02x}", ch as u32)?; } '\n' | '\r' | '\t' | _ => { write!(f, "{}", ch.escape_debug())?; } } } write!(f, "\"")?; Ok(()) } } impl ops::Deref for BStr { type Target = [u8]; #[inline] fn deref(&self) -> &[u8] { &self.bytes } } impl ops::DerefMut for BStr { #[inline] fn deref_mut(&mut self) -> &mut [u8] { &mut self.bytes } } impl ops::Index for BStr { type Output = u8; #[inline] fn index(&self, idx: usize) -> &u8 { &self.as_bytes()[idx] } } impl ops::Index for BStr { type Output = BStr; #[inline] fn index(&self, _: ops::RangeFull) -> &BStr { self } } impl ops::Index> for BStr { type Output = BStr; #[inline] fn index(&self, r: ops::Range) -> &BStr { BStr::new(&self.as_bytes()[r.start..r.end]) } } impl ops::Index> for BStr { type Output = BStr; #[inline] fn index(&self, r: ops::RangeInclusive) -> &BStr { 
BStr::new(&self.as_bytes()[*r.start()..=*r.end()]) } } impl ops::Index> for BStr { type Output = BStr; #[inline] fn index(&self, r: ops::RangeFrom) -> &BStr { BStr::new(&self.as_bytes()[r.start..]) } } impl ops::Index> for BStr { type Output = BStr; #[inline] fn index(&self, r: ops::RangeTo) -> &BStr { BStr::new(&self.as_bytes()[..r.end]) } } impl ops::Index> for BStr { type Output = BStr; #[inline] fn index(&self, r: ops::RangeToInclusive) -> &BStr { BStr::new(&self.as_bytes()[..=r.end]) } } impl ops::IndexMut for BStr { #[inline] fn index_mut(&mut self, idx: usize) -> &mut u8 { &mut self.bytes[idx] } } impl ops::IndexMut for BStr { #[inline] fn index_mut(&mut self, _: ops::RangeFull) -> &mut BStr { self } } impl ops::IndexMut> for BStr { #[inline] fn index_mut(&mut self, r: ops::Range) -> &mut BStr { BStr::from_bytes_mut(&mut self.bytes[r.start..r.end]) } } impl ops::IndexMut> for BStr { #[inline] fn index_mut(&mut self, r: ops::RangeInclusive) -> &mut BStr { BStr::from_bytes_mut(&mut self.bytes[*r.start()..=*r.end()]) } } impl ops::IndexMut> for BStr { #[inline] fn index_mut(&mut self, r: ops::RangeFrom) -> &mut BStr { BStr::from_bytes_mut(&mut self.bytes[r.start..]) } } impl ops::IndexMut> for BStr { #[inline] fn index_mut(&mut self, r: ops::RangeTo) -> &mut BStr { BStr::from_bytes_mut(&mut self.bytes[..r.end]) } } impl ops::IndexMut> for BStr { #[inline] fn index_mut(&mut self, r: ops::RangeToInclusive) -> &mut BStr { BStr::from_bytes_mut(&mut self.bytes[..=r.end]) } } impl AsRef<[u8]> for BStr { #[inline] fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl AsRef for [u8] { #[inline] fn as_ref(&self) -> &BStr { BStr::new(self) } } impl AsRef for str { #[inline] fn as_ref(&self) -> &BStr { BStr::new(self) } } impl AsMut<[u8]> for BStr { #[inline] fn as_mut(&mut self) -> &mut [u8] { &mut self.bytes } } impl AsMut for [u8] { #[inline] fn as_mut(&mut self) -> &mut BStr { BStr::new_mut(self) } } impl<'a> Default for &'a BStr { fn default() -> &'a BStr { BStr::from_bytes(b"") } } impl<'a> Default for &'a mut BStr { fn default() -> &'a mut BStr { BStr::from_bytes_mut(&mut []) } } impl<'a> From<&'a [u8]> for &'a BStr { #[inline] fn from(s: &'a [u8]) -> &'a BStr { BStr::from_bytes(s) } } impl<'a> From<&'a str> for &'a BStr { #[inline] fn from(s: &'a str) -> &'a BStr { BStr::from_bytes(s.as_bytes()) } } #[cfg(feature = "std")] impl<'a> From<&'a BStr> for Cow<'a, BStr> { #[inline] fn from(s: &'a BStr) -> Cow<'a, BStr> { Cow::Borrowed(s) } } #[cfg(feature = "std")] impl From> for Box { #[inline] fn from(s: Box<[u8]>) -> Box { BStr::from_boxed_bytes(s) } } #[cfg(feature = "std")] impl From> for Box<[u8]> { #[inline] fn from(s: Box) -> Box<[u8]> { BStr::into_boxed_bytes(s) } } impl Eq for BStr {} impl PartialEq for BStr { #[inline] fn eq(&self, other: &BStr) -> bool { self.as_bytes() == other.as_bytes() } } impl_partial_eq!(BStr, [u8]); impl_partial_eq!(BStr, &'a [u8]); impl_partial_eq!(BStr, str); impl_partial_eq!(BStr, &'a str); #[cfg(feature = "std")] impl_partial_eq!(BStr, Vec); #[cfg(feature = "std")] impl_partial_eq!(&'a BStr, Vec); #[cfg(feature = "std")] impl_partial_eq!(BStr, String); #[cfg(feature = "std")] impl_partial_eq!(&'a BStr, String); #[cfg(feature = "std")] impl_partial_eq_cow!(&'a BStr, Cow<'a, BStr>); #[cfg(feature = "std")] impl_partial_eq_cow!(&'a BStr, Cow<'a, str>); #[cfg(feature = "std")] impl_partial_eq_cow!(&'a BStr, Cow<'a, [u8]>); impl PartialOrd for BStr { #[inline] fn partial_cmp(&self, other: &BStr) -> Option { PartialOrd::partial_cmp(self.as_bytes(), 
other.as_bytes()) } } impl Ord for BStr { #[inline] fn cmp(&self, other: &BStr) -> Ordering { self.partial_cmp(other).unwrap() } } impl_partial_ord!(BStr, [u8]); impl_partial_ord!(BStr, &'a [u8]); impl_partial_ord!(BStr, str); impl_partial_ord!(BStr, &'a str); #[cfg(feature = "std")] impl_partial_ord!(BStr, Vec); #[cfg(feature = "std")] impl_partial_ord!(&'a BStr, Vec); #[cfg(feature = "std")] impl_partial_ord!(BStr, String); #[cfg(feature = "std")] impl_partial_ord!(&'a BStr, String); } #[cfg(feature = "serde1-nostd")] mod bstr_serde { use core::fmt; use serde::{ de::Error, de::Visitor, Deserialize, Deserializer, Serialize, Serializer, }; use crate::bstr::BStr; impl Serialize for BStr { #[inline] fn serialize(&self, serializer: S) -> Result where S: Serializer, { serializer.serialize_bytes(self.as_bytes()) } } impl<'a, 'de: 'a> Deserialize<'de> for &'a BStr { #[inline] fn deserialize(deserializer: D) -> Result<&'a BStr, D::Error> where D: Deserializer<'de>, { struct BStrVisitor; impl<'de> Visitor<'de> for BStrVisitor { type Value = &'de BStr; fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("a borrowed byte string") } #[inline] fn visit_borrowed_bytes( self, value: &'de [u8], ) -> Result<&'de BStr, E> { Ok(BStr::new(value)) } #[inline] fn visit_borrowed_str( self, value: &'de str, ) -> Result<&'de BStr, E> { Ok(BStr::new(value)) } } deserializer.deserialize_bytes(BStrVisitor) } } } #[cfg(feature = "serde1")] mod bstring_serde { use std::cmp; use std::fmt; use serde::{ de::Error, de::SeqAccess, de::Visitor, Deserialize, Deserializer, Serialize, Serializer, }; use crate::bstring::BString; impl Serialize for BString { #[inline] fn serialize(&self, serializer: S) -> Result where S: Serializer, { serializer.serialize_bytes(self.as_bytes()) } } impl<'de> Deserialize<'de> for BString { #[inline] fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { struct BStringVisitor; impl<'de> Visitor<'de> for BStringVisitor { type Value = BString; fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("a byte string") } #[inline] fn visit_seq>( self, mut visitor: V, ) -> Result { let len = cmp::min(visitor.size_hint().unwrap_or(0), 256); let mut bytes = Vec::with_capacity(len); while let Some(v) = visitor.next_element()? 
{ bytes.push(v); } Ok(BString::from(bytes)) } #[inline] fn visit_bytes( self, value: &[u8], ) -> Result { Ok(BString::from(value)) } #[inline] fn visit_byte_buf( self, value: Vec, ) -> Result { Ok(BString::from(value)) } #[inline] fn visit_str( self, value: &str, ) -> Result { Ok(BString::from(value)) } #[inline] fn visit_string( self, value: String, ) -> Result { Ok(BString::from(value)) } } deserializer.deserialize_byte_buf(BStringVisitor) } } } #[cfg(test)] mod display { use crate::bstring::BString; use crate::ByteSlice; #[test] fn clean() { assert_eq!(&format!("{}", &b"abc".as_bstr()), "abc"); assert_eq!(&format!("{}", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��"); } #[test] fn width_bigger_than_bstr() { assert_eq!(&format!("{:<7}!", &b"abc".as_bstr()), "abc !"); assert_eq!(&format!("{:>7}!", &b"abc".as_bstr()), " abc!"); assert_eq!(&format!("{:^7}!", &b"abc".as_bstr()), " abc !"); assert_eq!(&format!("{:^6}!", &b"abc".as_bstr()), " abc !"); assert_eq!(&format!("{:-<7}!", &b"abc".as_bstr()), "abc----!"); assert_eq!(&format!("{:->7}!", &b"abc".as_bstr()), "----abc!"); assert_eq!(&format!("{:-^7}!", &b"abc".as_bstr()), "--abc--!"); assert_eq!(&format!("{:-^6}!", &b"abc".as_bstr()), "-abc--!"); assert_eq!( &format!("{:<7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(�� !" ); assert_eq!( &format!("{:>7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), " �(��!" ); assert_eq!( &format!("{:^7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), " �(�� !" ); assert_eq!( &format!("{:^6}!", &b"\xf0\x28\x8c\xbc".as_bstr()), " �(�� !" ); assert_eq!( &format!("{:-<7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��---!" ); assert_eq!( &format!("{:->7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "---�(��!" ); assert_eq!( &format!("{:-^7}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "-�(��--!" ); assert_eq!( &format!("{:-^6}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "-�(��-!" ); } #[test] fn width_lesser_than_bstr() { assert_eq!(&format!("{:<2}!", &b"abc".as_bstr()), "abc!"); assert_eq!(&format!("{:>2}!", &b"abc".as_bstr()), "abc!"); assert_eq!(&format!("{:^2}!", &b"abc".as_bstr()), "abc!"); assert_eq!(&format!("{:-<2}!", &b"abc".as_bstr()), "abc!"); assert_eq!(&format!("{:->2}!", &b"abc".as_bstr()), "abc!"); assert_eq!(&format!("{:-^2}!", &b"abc".as_bstr()), "abc!"); assert_eq!( &format!("{:<3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:>3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:^3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:^2}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:-<3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:->3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:-^3}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); assert_eq!( &format!("{:-^2}!", &b"\xf0\x28\x8c\xbc".as_bstr()), "�(��!" ); } quickcheck::quickcheck! 
{ fn total_length(bstr: BString) -> bool { let size = bstr.chars().count(); format!("{:<1$}", bstr.as_bstr(), size).chars().count() >= size } } } #[cfg(test)] mod bstring_arbitrary { use crate::bstring::BString; use quickcheck::{Arbitrary, Gen}; impl Arbitrary for BString { fn arbitrary(g: &mut Gen) -> BString { BString::from(Vec::::arbitrary(g)) } fn shrink(&self) -> Box> { Box::new(self.bytes.shrink().map(BString::from)) } } } #[test] fn test_debug() { use crate::{ByteSlice, B}; assert_eq!( r#""\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp""#, format!("{:?}", b"\0\0\0 ftypisom\0\0\x02\0isomiso2avc1mp".as_bstr()), ); // Tests that if the underlying bytes contain the UTF-8 encoding of the // replacement codepoint, then we emit the codepoint just like other // non-printable Unicode characters. assert_eq!( b"\"\\xFF\xEF\xBF\xBD\\xFF\"".as_bstr(), // Before fixing #72, the output here would be: // \\xFF\\xEF\\xBF\\xBD\\xFF B(&format!("{:?}", b"\xFF\xEF\xBF\xBD\xFF".as_bstr())).as_bstr(), ); } // See: https://github.com/BurntSushi/bstr/issues/82 #[test] fn test_cows_regression() { use crate::ByteSlice; use std::borrow::Cow; let c1 = Cow::from(b"hello bstr".as_bstr()); let c2 = b"goodbye bstr".as_bstr(); assert_ne!(c1, c2); let c3 = Cow::from("hello str"); let c4 = "goodbye str"; assert_ne!(c3, c4); } bstr-0.2.17/src/io.rs000064400000000000000000000407710072674642500124720ustar 00000000000000/*! Utilities for working with I/O using byte strings. This module currently only exports a single trait, `BufReadExt`, which provides facilities for conveniently and efficiently working with lines as byte strings. More APIs may be added in the future. */ use std::io; use crate::ext_slice::ByteSlice; use crate::ext_vec::ByteVec; /// An extention trait for /// [`std::io::BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html) /// which provides convenience APIs for dealing with byte strings. pub trait BufReadExt: io::BufRead { /// Returns an iterator over the lines of this reader, where each line /// is represented as a byte string. /// /// Each item yielded by this iterator is a `io::Result>`, where /// an error is yielded if there was a problem reading from the underlying /// reader. /// /// On success, the next line in the iterator is returned. The line does /// *not* contain a trailing `\n` or `\r\n`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::io; /// /// use bstr::io::BufReadExt; /// /// # fn example() -> Result<(), io::Error> { /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); /// /// let mut lines = vec![]; /// for result in cursor.byte_lines() { /// let line = result?; /// lines.push(line); /// } /// assert_eq!(lines.len(), 3); /// assert_eq!(lines[0], "lorem".as_bytes()); /// assert_eq!(lines[1], "ipsum".as_bytes()); /// assert_eq!(lines[2], "dolor".as_bytes()); /// # Ok(()) }; example().unwrap() /// ``` fn byte_lines(self) -> ByteLines where Self: Sized, { ByteLines { buf: self } } /// Returns an iterator over byte-terminated records of this reader, where /// each record is represented as a byte string. /// /// Each item yielded by this iterator is a `io::Result>`, where /// an error is yielded if there was a problem reading from the underlying /// reader. /// /// On success, the next record in the iterator is returned. The record /// does *not* contain its trailing terminator. /// /// Note that calling `byte_records(b'\n')` differs from `byte_lines()` in /// that it has no special handling for `\r`. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::io; /// /// use bstr::io::BufReadExt; /// /// # fn example() -> Result<(), io::Error> { /// let cursor = io::Cursor::new(b"lorem\x00ipsum\x00dolor"); /// /// let mut records = vec![]; /// for result in cursor.byte_records(b'\x00') { /// let record = result?; /// records.push(record); /// } /// assert_eq!(records.len(), 3); /// assert_eq!(records[0], "lorem".as_bytes()); /// assert_eq!(records[1], "ipsum".as_bytes()); /// assert_eq!(records[2], "dolor".as_bytes()); /// # Ok(()) }; example().unwrap() /// ``` fn byte_records(self, terminator: u8) -> ByteRecords where Self: Sized, { ByteRecords { terminator, buf: self } } /// Executes the given closure on each line in the underlying reader. /// /// If the closure returns an error (or if the underlying reader returns an /// error), then iteration is stopped and the error is returned. If false /// is returned, then iteration is stopped and no error is returned. /// /// The closure given is called on exactly the same values as yielded by /// the [`byte_lines`](trait.BufReadExt.html#method.byte_lines) /// iterator. Namely, lines do _not_ contain trailing `\n` or `\r\n` bytes. /// /// This routine is useful for iterating over lines as quickly as /// possible. Namely, a single allocation is reused for each line. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::io; /// /// use bstr::io::BufReadExt; /// /// # fn example() -> Result<(), io::Error> { /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); /// /// let mut lines = vec![]; /// cursor.for_byte_line(|line| { /// lines.push(line.to_vec()); /// Ok(true) /// })?; /// assert_eq!(lines.len(), 3); /// assert_eq!(lines[0], "lorem".as_bytes()); /// assert_eq!(lines[1], "ipsum".as_bytes()); /// assert_eq!(lines[2], "dolor".as_bytes()); /// # Ok(()) }; example().unwrap() /// ``` fn for_byte_line(self, mut for_each_line: F) -> io::Result<()> where Self: Sized, F: FnMut(&[u8]) -> io::Result, { self.for_byte_line_with_terminator(|line| { for_each_line(&trim_line_slice(&line)) }) } /// Executes the given closure on each byte-terminated record in the /// underlying reader. /// /// If the closure returns an error (or if the underlying reader returns an /// error), then iteration is stopped and the error is returned. If false /// is returned, then iteration is stopped and no error is returned. /// /// The closure given is called on exactly the same values as yielded by /// the [`byte_records`](trait.BufReadExt.html#method.byte_records) /// iterator. Namely, records do _not_ contain a trailing terminator byte. /// /// This routine is useful for iterating over records as quickly as /// possible. Namely, a single allocation is reused for each record. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::io; /// /// use bstr::io::BufReadExt; /// /// # fn example() -> Result<(), io::Error> { /// let cursor = io::Cursor::new(b"lorem\x00ipsum\x00dolor"); /// /// let mut records = vec![]; /// cursor.for_byte_record(b'\x00', |record| { /// records.push(record.to_vec()); /// Ok(true) /// })?; /// assert_eq!(records.len(), 3); /// assert_eq!(records[0], "lorem".as_bytes()); /// assert_eq!(records[1], "ipsum".as_bytes()); /// assert_eq!(records[2], "dolor".as_bytes()); /// # Ok(()) }; example().unwrap() /// ``` fn for_byte_record( self, terminator: u8, mut for_each_record: F, ) -> io::Result<()> where Self: Sized, F: FnMut(&[u8]) -> io::Result, { self.for_byte_record_with_terminator(terminator, |chunk| { for_each_record(&trim_record_slice(&chunk, terminator)) }) } /// Executes the given closure on each line in the underlying reader. /// /// If the closure returns an error (or if the underlying reader returns an /// error), then iteration is stopped and the error is returned. If false /// is returned, then iteration is stopped and no error is returned. /// /// Unlike /// [`for_byte_line`](trait.BufReadExt.html#method.for_byte_line), /// the lines given to the closure *do* include the line terminator, if one /// exists. /// /// This routine is useful for iterating over lines as quickly as /// possible. Namely, a single allocation is reused for each line. /// /// This is identical to `for_byte_record_with_terminator` with a /// terminator of `\n`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::io; /// /// use bstr::io::BufReadExt; /// /// # fn example() -> Result<(), io::Error> { /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); /// /// let mut lines = vec![]; /// cursor.for_byte_line_with_terminator(|line| { /// lines.push(line.to_vec()); /// Ok(true) /// })?; /// assert_eq!(lines.len(), 3); /// assert_eq!(lines[0], "lorem\n".as_bytes()); /// assert_eq!(lines[1], "ipsum\r\n".as_bytes()); /// assert_eq!(lines[2], "dolor".as_bytes()); /// # Ok(()) }; example().unwrap() /// ``` fn for_byte_line_with_terminator( self, for_each_line: F, ) -> io::Result<()> where Self: Sized, F: FnMut(&[u8]) -> io::Result, { self.for_byte_record_with_terminator(b'\n', for_each_line) } /// Executes the given closure on each byte-terminated record in the /// underlying reader. /// /// If the closure returns an error (or if the underlying reader returns an /// error), then iteration is stopped and the error is returned. If false /// is returned, then iteration is stopped and no error is returned. /// /// Unlike /// [`for_byte_record`](trait.BufReadExt.html#method.for_byte_record), /// the lines given to the closure *do* include the record terminator, if /// one exists. /// /// This routine is useful for iterating over records as quickly as /// possible. Namely, a single allocation is reused for each record. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use std::io; /// /// use bstr::B; /// use bstr::io::BufReadExt; /// /// # fn example() -> Result<(), io::Error> { /// let cursor = io::Cursor::new(b"lorem\x00ipsum\x00dolor"); /// /// let mut records = vec![]; /// cursor.for_byte_record_with_terminator(b'\x00', |record| { /// records.push(record.to_vec()); /// Ok(true) /// })?; /// assert_eq!(records.len(), 3); /// assert_eq!(records[0], B(b"lorem\x00")); /// assert_eq!(records[1], B("ipsum\x00")); /// assert_eq!(records[2], B("dolor")); /// # Ok(()) }; example().unwrap() /// ``` fn for_byte_record_with_terminator( mut self, terminator: u8, mut for_each_record: F, ) -> io::Result<()> where Self: Sized, F: FnMut(&[u8]) -> io::Result, { let mut bytes = vec![]; let mut res = Ok(()); let mut consumed = 0; 'outer: loop { // Lend out complete record slices from our buffer { let mut buf = self.fill_buf()?; while let Some(index) = buf.find_byte(terminator) { let (record, rest) = buf.split_at(index + 1); buf = rest; consumed += record.len(); match for_each_record(&record) { Ok(false) => break 'outer, Err(err) => { res = Err(err); break 'outer; } _ => (), } } // Copy the final record fragment to our local buffer. This // saves read_until() from re-scanning a buffer we know // contains no remaining terminators. bytes.extend_from_slice(&buf); consumed += buf.len(); } self.consume(consumed); consumed = 0; // N.B. read_until uses a different version of memchr that may // be slower than the memchr crate that bstr uses. However, this // should only run for a fairly small number of records, assuming a // decent buffer size. self.read_until(terminator, &mut bytes)?; if bytes.is_empty() || !for_each_record(&bytes)? { break; } bytes.clear(); } self.consume(consumed); res } } impl BufReadExt for B {} /// An iterator over lines from an instance of /// [`std::io::BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html). /// /// This iterator is generally created by calling the /// [`byte_lines`](trait.BufReadExt.html#method.byte_lines) /// method on the /// [`BufReadExt`](trait.BufReadExt.html) /// trait. #[derive(Debug)] pub struct ByteLines { buf: B, } /// An iterator over records from an instance of /// [`std::io::BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html). /// /// A byte record is any sequence of bytes terminated by a particular byte /// chosen by the caller. For example, NUL separated byte strings are said to /// be NUL-terminated byte records. /// /// This iterator is generally created by calling the /// [`byte_records`](trait.BufReadExt.html#method.byte_records) /// method on the /// [`BufReadExt`](trait.BufReadExt.html) /// trait. 
#[derive(Debug)] pub struct ByteRecords { buf: B, terminator: u8, } impl Iterator for ByteLines { type Item = io::Result>; fn next(&mut self) -> Option>> { let mut bytes = vec![]; match self.buf.read_until(b'\n', &mut bytes) { Err(e) => Some(Err(e)), Ok(0) => None, Ok(_) => { trim_line(&mut bytes); Some(Ok(bytes)) } } } } impl Iterator for ByteRecords { type Item = io::Result>; fn next(&mut self) -> Option>> { let mut bytes = vec![]; match self.buf.read_until(self.terminator, &mut bytes) { Err(e) => Some(Err(e)), Ok(0) => None, Ok(_) => { trim_record(&mut bytes, self.terminator); Some(Ok(bytes)) } } } } fn trim_line(line: &mut Vec) { if line.last_byte() == Some(b'\n') { line.pop_byte(); if line.last_byte() == Some(b'\r') { line.pop_byte(); } } } fn trim_line_slice(mut line: &[u8]) -> &[u8] { if line.last_byte() == Some(b'\n') { line = &line[..line.len() - 1]; if line.last_byte() == Some(b'\r') { line = &line[..line.len() - 1]; } } line } fn trim_record(record: &mut Vec, terminator: u8) { if record.last_byte() == Some(terminator) { record.pop_byte(); } } fn trim_record_slice(mut record: &[u8], terminator: u8) -> &[u8] { if record.last_byte() == Some(terminator) { record = &record[..record.len() - 1]; } record } #[cfg(test)] mod tests { use super::BufReadExt; use crate::bstring::BString; fn collect_lines>(slice: B) -> Vec { let mut lines = vec![]; slice .as_ref() .for_byte_line(|line| { lines.push(BString::from(line.to_vec())); Ok(true) }) .unwrap(); lines } fn collect_lines_term>(slice: B) -> Vec { let mut lines = vec![]; slice .as_ref() .for_byte_line_with_terminator(|line| { lines.push(BString::from(line.to_vec())); Ok(true) }) .unwrap(); lines } #[test] fn lines_without_terminator() { assert_eq!(collect_lines(""), Vec::::new()); assert_eq!(collect_lines("\n"), vec![""]); assert_eq!(collect_lines("\n\n"), vec!["", ""]); assert_eq!(collect_lines("a\nb\n"), vec!["a", "b"]); assert_eq!(collect_lines("a\nb"), vec!["a", "b"]); assert_eq!(collect_lines("abc\nxyz\n"), vec!["abc", "xyz"]); assert_eq!(collect_lines("abc\nxyz"), vec!["abc", "xyz"]); assert_eq!(collect_lines("\r\n"), vec![""]); assert_eq!(collect_lines("\r\n\r\n"), vec!["", ""]); assert_eq!(collect_lines("a\r\nb\r\n"), vec!["a", "b"]); assert_eq!(collect_lines("a\r\nb"), vec!["a", "b"]); assert_eq!(collect_lines("abc\r\nxyz\r\n"), vec!["abc", "xyz"]); assert_eq!(collect_lines("abc\r\nxyz"), vec!["abc", "xyz"]); assert_eq!(collect_lines("abc\rxyz"), vec!["abc\rxyz"]); } #[test] fn lines_with_terminator() { assert_eq!(collect_lines_term(""), Vec::::new()); assert_eq!(collect_lines_term("\n"), vec!["\n"]); assert_eq!(collect_lines_term("\n\n"), vec!["\n", "\n"]); assert_eq!(collect_lines_term("a\nb\n"), vec!["a\n", "b\n"]); assert_eq!(collect_lines_term("a\nb"), vec!["a\n", "b"]); assert_eq!(collect_lines_term("abc\nxyz\n"), vec!["abc\n", "xyz\n"]); assert_eq!(collect_lines_term("abc\nxyz"), vec!["abc\n", "xyz"]); assert_eq!(collect_lines_term("\r\n"), vec!["\r\n"]); assert_eq!(collect_lines_term("\r\n\r\n"), vec!["\r\n", "\r\n"]); assert_eq!(collect_lines_term("a\r\nb\r\n"), vec!["a\r\n", "b\r\n"]); assert_eq!(collect_lines_term("a\r\nb"), vec!["a\r\n", "b"]); assert_eq!( collect_lines_term("abc\r\nxyz\r\n"), vec!["abc\r\n", "xyz\r\n"] ); assert_eq!(collect_lines_term("abc\r\nxyz"), vec!["abc\r\n", "xyz"]); assert_eq!(collect_lines_term("abc\rxyz"), vec!["abc\rxyz"]); } } bstr-0.2.17/src/lib.rs000064400000000000000000000470040072674642500126250ustar 00000000000000/*! A byte string library. 
Byte strings are just like standard Unicode strings with one very important difference: byte strings are only *conventionally* UTF-8 while Rust's standard Unicode strings are *guaranteed* to be valid UTF-8. The primary motivation for byte strings is for handling arbitrary bytes that are mostly UTF-8. # Overview This crate provides two important traits that provide string oriented methods on `&[u8]` and `Vec` types: * [`ByteSlice`](trait.ByteSlice.html) extends the `[u8]` type with additional string oriented methods. * [`ByteVec`](trait.ByteVec.html) extends the `Vec` type with additional string oriented methods. Additionally, this crate provides two concrete byte string types that deref to `[u8]` and `Vec`. These are useful for storing byte string types, and come with convenient `std::fmt::Debug` implementations: * [`BStr`](struct.BStr.html) is a byte string slice, analogous to `str`. * [`BString`](struct.BString.html) is an owned growable byte string buffer, analogous to `String`. Additionally, the free function [`B`](fn.B.html) serves as a convenient short hand for writing byte string literals. # Quick examples Byte strings build on the existing APIs for `Vec` and `&[u8]`, with additional string oriented methods. Operations such as iterating over graphemes, searching for substrings, replacing substrings, trimming and case conversion are examples of things not provided on the standard library `&[u8]` APIs but are provided by this crate. For example, this code iterates over all of occurrences of a subtring: ``` use bstr::ByteSlice; let s = b"foo bar foo foo quux foo"; let mut matches = vec![]; for start in s.find_iter("foo") { matches.push(start); } assert_eq!(matches, [0, 8, 12, 21]); ``` Here's another example showing how to do a search and replace (and also showing use of the `B` function): ``` use bstr::{B, ByteSlice}; let old = B("foo ☃☃☃ foo foo quux foo"); let new = old.replace("foo", "hello"); assert_eq!(new, B("hello ☃☃☃ hello hello quux hello")); ``` And here's an example that shows case conversion, even in the presence of invalid UTF-8: ``` use bstr::{ByteSlice, ByteVec}; let mut lower = Vec::from("hello β"); lower[0] = b'\xFF'; // lowercase β is uppercased to Β assert_eq!(lower.to_uppercase(), b"\xFFELLO \xCE\x92"); ``` # Convenient debug representation When working with byte strings, it is often useful to be able to print them as if they were byte strings and not sequences of integers. While this crate cannot affect the `std::fmt::Debug` implementations for `[u8]` and `Vec`, this crate does provide the `BStr` and `BString` types which have convenient `std::fmt::Debug` implementations. For example, this ``` use bstr::ByteSlice; let mut bytes = Vec::from("hello β"); bytes[0] = b'\xFF'; println!("{:?}", bytes.as_bstr()); ``` will output `"\xFFello β"`. This example works because the [`ByteSlice::as_bstr`](trait.ByteSlice.html#method.as_bstr) method converts any `&[u8]` to a `&BStr`. # When should I use byte strings? This library reflects my hypothesis that UTF-8 by convention is a better trade off in some circumstances than guaranteed UTF-8. It's possible, perhaps even likely, that this is a niche concern for folks working closely with core text primitives. The first time this idea hit me was in the implementation of Rust's regex engine. In particular, very little of the internal implementation cares at all about searching valid UTF-8 encoded strings. Indeed, internally, the implementation converts `&str` from the API to `&[u8]` fairly quickly and just deals with raw bytes. 
UTF-8 match boundaries are then guaranteed by the finite state machine itself rather than any specific string type. This makes it possible to not only run regexes on `&str` values, but also on `&[u8]` values. Why would you ever want to run a regex on a `&[u8]` though? Well, `&[u8]` is the fundamental way at which one reads data from all sorts of streams, via the standard library's [`Read`](https://doc.rust-lang.org/std/io/trait.Read.html) trait. In particular, there is no platform independent way to determine whether what you're reading from is some binary file or a human readable text file. Therefore, if you're writing a program to search files, you probably need to deal with `&[u8]` directly unless you're okay with first converting it to a `&str` and dropping any bytes that aren't valid UTF-8. (Or otherwise determine the encoding---which is often impractical---and perform a transcoding step.) Often, the simplest and most robust way to approach this is to simply treat the contents of a file as if it were mostly valid UTF-8 and pass through invalid UTF-8 untouched. This may not be the most correct approach though! One case in particular exacerbates these issues, and that's memory mapping a file. When you memory map a file, that file may be gigabytes big, but all you get is a `&[u8]`. Converting that to a `&str` all in one go is generally not a good idea because of the costs associated with doing so, and also because it generally causes one to do two passes over the data instead of one, which is quite undesirable. It is of course usually possible to do it an incremental way by only parsing chunks at a time, but this is often complex to do or impractical. For example, many regex engines only accept one contiguous sequence of bytes at a time with no way to perform incremental matching. In summary, conventional UTF-8 byte strings provided by this library are definitely useful in some limited circumstances, but how useful they are more broadly isn't clear yet. # `bstr` in public APIs Since this library is not yet `1.0`, you should not use it in the public API of your crates until it hits `1.0` (unless you're OK with with tracking breaking releases of `bstr`). It is expected that `bstr 1.0` will be released before 2022. In general, it should be possible to avoid putting anything in this crate into your public APIs. Namely, you should never need to use the `ByteSlice` or `ByteVec` traits as bounds on public APIs, since their only purpose is to extend the methods on the concrete types `[u8]` and `Vec`, respectively. Similarly, it should not be necessary to put either the `BStr` or `BString` types into public APIs. If you want to use them internally, then they can be converted to/from `[u8]`/`Vec` as needed. # Differences with standard strings The primary difference between `[u8]` and `str` is that the former is conventionally UTF-8 while the latter is guaranteed to be UTF-8. The phrase "conventionally UTF-8" means that a `[u8]` may contain bytes that do not form a valid UTF-8 sequence, but operations defined on the type in this crate are generally most useful on valid UTF-8 sequences. For example, iterating over Unicode codepoints or grapheme clusters is an operation that is only defined on valid UTF-8. Therefore, when invalid UTF-8 is encountered, the Unicode replacement codepoint is substituted. Thus, a byte string that is not UTF-8 at all is of limited utility when using these crate. However, not all operations on byte strings are specifically Unicode aware. 
Substring search, for example, has no specific Unicode semantics ascribed to it: it works just as well for byte strings that are completely valid UTF-8 as for byte strings that contain no valid UTF-8 at all. The same is true of replacements and various other operations that do not need any Unicode-specific tailoring.

Aside from the difference in how UTF-8 is handled, the APIs between `[u8]` and `str` (and `Vec<u8>` and `String`) are intentionally very similar, including maintaining the same behavior for corner cases in things like substring splitting. There are, however, some differences:

* Substring search is not done with `matches`, but instead, `find_iter`. In general, this crate does not define any generic [`Pattern`](https://doc.rust-lang.org/std/str/pattern/trait.Pattern.html) infrastructure, and instead prefers adding new methods for different argument types. For example, `matches` can search by a `char` or a `&str`, whereas `find_iter` can only search by a byte string. `find_char` can be used for searching by a `char`.
* Since `SliceConcatExt` in the standard library is unstable, it is not possible to reuse that to implement `join` and `concat` methods. Instead, [`join`](fn.join.html) and [`concat`](fn.concat.html) are provided as free functions that perform a similar task.
* This library bundles in a few more Unicode operations, such as grapheme, word and sentence iterators. More operations, such as normalization and case folding, may be provided in the future.
* Some `String`/`str` APIs will panic if a particular index was not on a valid UTF-8 code unit sequence boundary. By contrast, no such checking is performed in this crate, as is consistent with treating byte strings as a sequence of bytes. This means callers are responsible for maintaining a UTF-8 invariant if that's important.
* Some routines provided by this crate, such as `starts_with_str`, have a `_str` suffix to differentiate them from similar routines already defined on the `[u8]` type. The difference is that `starts_with` requires its parameter to be a `&[u8]`, whereas `starts_with_str` permits its parameter to be anything that implements `AsRef<[u8]>`, which is more flexible. This means you can write `bytes.starts_with_str("☃")` instead of `bytes.starts_with("☃".as_bytes())`.

Otherwise, you should find most of the APIs between this crate and the standard library string APIs to be very similar, if not identical.

# Handling of invalid UTF-8

Since byte strings are only *conventionally* UTF-8, there is no guarantee that byte strings contain valid UTF-8. Indeed, it is perfectly legal for a byte string to contain arbitrary bytes. However, since this library defines a *string* type, it provides many operations specified by Unicode. These operations are typically only defined over codepoints, and thus have no real meaning on bytes that are invalid UTF-8 because they do not map to a particular codepoint.

For this reason, whenever operations defined only on codepoints are used, this library will automatically convert invalid UTF-8 to the Unicode replacement codepoint, `U+FFFD`, which looks like this: `�`. For example, an [iterator over codepoints](struct.Chars.html) will yield a Unicode replacement codepoint whenever it comes across bytes that are not valid UTF-8:

```
use bstr::ByteSlice;

let bs = b"a\xFF\xFFz";
let chars: Vec<char> = bs.chars().collect();
assert_eq!(vec!['a', '\u{FFFD}', '\u{FFFD}', 'z'], chars);
```

There are a few ways in which invalid bytes can be substituted with a Unicode replacement codepoint.
One way, not used by this crate, is to replace every individual invalid byte with a single replacement codepoint. In contrast, the approach this crate uses is called the "substitution of maximal subparts," as specified by the Unicode Standard (Chapter 3, Section 9). (This approach is also used by [W3C's Encoding Standard](https://www.w3.org/TR/encoding/).) In this strategy, a replacement codepoint is inserted whenever a byte is found that cannot possibly lead to a valid UTF-8 code unit sequence. If there were previous bytes that represented a *prefix* of a well-formed UTF-8 code unit sequence, then all of those bytes (up to 3) are substituted with a single replacement codepoint. For example:

```
use bstr::ByteSlice;

let bs = b"a\xF0\x9F\x87z";
let chars: Vec<char> = bs.chars().collect();
// The bytes \xF0\x9F\x87 could lead to a valid UTF-8 sequence, but 3 of them
// on their own are invalid. Only one replacement codepoint is substituted,
// which demonstrates the "substitution of maximal subparts" strategy.
assert_eq!(vec!['a', '\u{FFFD}', 'z'], chars);
```

If you do need access to the raw bytes for some reason when using an iterator like `Chars`, then you should use the iterator's "indices" variant, which gives the byte offsets containing the invalid UTF-8 bytes that were substituted with the replacement codepoint. For example:

```
use bstr::{B, ByteSlice};

let bs = b"a\xE2\x98z";
let chars: Vec<(usize, usize, char)> = bs.char_indices().collect();
// Even though the replacement codepoint is encoded as 3 bytes itself, the
// byte range given here is only two bytes, corresponding to the original
// raw bytes.
assert_eq!(vec![(0, 1, 'a'), (1, 3, '\u{FFFD}'), (3, 4, 'z')], chars);

// Thus, getting the original raw bytes is as simple as slicing the original
// byte string:
let chars: Vec<&[u8]> = bs.char_indices().map(|(s, e, _)| &bs[s..e]).collect();
assert_eq!(vec![B("a"), B(b"\xE2\x98"), B("z")], chars);
```

# File paths and OS strings

One of the premier features of Rust's standard library is how it handles file paths. In particular, it makes it very hard to write incorrect code while simultaneously providing a correct cross platform abstraction for manipulating file paths. The key challenge that one faces with file paths across platforms is derived from the following observations:

* On most Unix-like systems, file paths are an arbitrary sequence of bytes.
* On Windows, file paths are an arbitrary sequence of 16-bit integers.

(In both cases, certain sequences aren't allowed. For example, a `NUL` byte is not allowed in either case. But we can ignore this for the purposes of this section.)

Byte strings, like the ones provided in this crate, line up really well with file paths on Unix-like systems, which are themselves just arbitrary sequences of bytes. It turns out that if you treat them as "mostly UTF-8," then things work out pretty well. On the contrary, byte strings _don't_ really work that well on Windows because it's not possible to correctly roundtrip file paths between 16-bit integers and something that looks like UTF-8 _without_ explicitly defining an encoding to do this for you, which is anathema to byte strings, which are just bytes.

Rust's standard library elegantly solves this problem by specifying an internal encoding for file paths that's only used on Windows called [WTF-8](https://simonsapin.github.io/wtf-8/).
Its key property is that it permits losslessly roundtripping file paths on Windows by extending UTF-8 to support an encoding of surrogate codepoints, while simultaneously supporting zero-cost conversion from Rust's Unicode strings to file paths. (Since UTF-8 is a proper subset of WTF-8.)

The fundamental point at which the above strategy fails is when you want to treat file paths as things that look like strings in a zero cost way. In most cases, this is actually the wrong thing to do, but some cases call for it, for example, glob or regex matching on file paths. This is because WTF-8 is treated as an internal implementation detail, and there is no way to access those bytes via a public API. Therefore, such consumers are limited in what they can do:

1. One could re-implement WTF-8 and re-encode file paths on Windows to WTF-8 by accessing their underlying 16-bit integer representation. Unfortunately, this isn't zero cost (it introduces a second WTF-8 decoding step) and it's not clear this is a good thing to do, since WTF-8 should ideally remain an internal implementation detail.
2. One could instead declare that they will not handle paths on Windows that are not valid UTF-16, and return an error when one is encountered.
3. Like (2), but instead of returning an error, lossily decode file paths on Windows that aren't valid UTF-16 by replacing invalid UTF-16 code units with the Unicode replacement codepoint.

While this library may provide facilities for (1) in the future, currently, this library only provides facilities for (2) and (3). In particular, a suite of conversion functions is provided for converting between byte strings, OS strings and file paths. For owned byte strings, they are:

* [`ByteVec::from_os_string`](trait.ByteVec.html#method.from_os_string)
* [`ByteVec::from_os_str_lossy`](trait.ByteVec.html#method.from_os_str_lossy)
* [`ByteVec::from_path_buf`](trait.ByteVec.html#method.from_path_buf)
* [`ByteVec::from_path_lossy`](trait.ByteVec.html#method.from_path_lossy)
* [`ByteVec::into_os_string`](trait.ByteVec.html#method.into_os_string)
* [`ByteVec::into_os_string_lossy`](trait.ByteVec.html#method.into_os_string_lossy)
* [`ByteVec::into_path_buf`](trait.ByteVec.html#method.into_path_buf)
* [`ByteVec::into_path_buf_lossy`](trait.ByteVec.html#method.into_path_buf_lossy)

For byte string slices, they are:

* [`ByteSlice::from_os_str`](trait.ByteSlice.html#method.from_os_str)
* [`ByteSlice::from_path`](trait.ByteSlice.html#method.from_path)
* [`ByteSlice::to_os_str`](trait.ByteSlice.html#method.to_os_str)
* [`ByteSlice::to_os_str_lossy`](trait.ByteSlice.html#method.to_os_str_lossy)
* [`ByteSlice::to_path`](trait.ByteSlice.html#method.to_path)
* [`ByteSlice::to_path_lossy`](trait.ByteSlice.html#method.to_path_lossy)

On Unix, all of these conversions are rigorously zero cost, which gives one a way to ergonomically deal with raw file paths exactly as they are using normal string-related functions. On Windows, these conversion routines perform a UTF-8 check and either return an error or lossily decode the file path into valid UTF-8, depending on which function you use. This means that you cannot roundtrip all file paths on Windows correctly using these conversion routines. However, this may be an acceptable downside since such file paths are exceptionally rare. Moreover, roundtripping isn't always necessary, for example, if all you're doing is filtering based on file paths.
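As a short sketch of how these conversion routines fit together (the path literal below is just an illustrative placeholder; see the method links above for the exact signatures and per-platform error behavior):

```
use std::path::Path;

use bstr::{ByteSlice, ByteVec};

// An arbitrary example path; nothing below depends on it existing.
let path = Path::new("foo/bar.rs");

// Path -> byte string. On Unix this borrows the underlying bytes; on
// Windows it is a lossy UTF-8 copy of the path.
let bytes = Vec::from_path_lossy(path);

// Ordinary byte string operations now apply to the path.
assert_eq!(bytes.find("bar"), Some(4));

// Byte string -> Path. On Unix this always succeeds; on Windows it
// fails if the bytes are not valid UTF-8.
let roundtrip = bytes.to_path().unwrap();
assert_eq!(roundtrip, path);
```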
The reason why using byte strings for this is potentially superior to the standard library's approach is that a lot of Rust code is already lossily converting file paths to Rust's Unicode strings, which are required to be valid UTF-8, and thus contains latent bugs on Unix where paths with invalid UTF-8 are not terribly uncommon. If you instead use byte strings, then you're guaranteed to write correct code for Unix, at the cost of getting a corner case wrong on Windows.
*/

#![cfg_attr(not(feature = "std"), no_std)]

pub use crate::bstr::BStr;
#[cfg(feature = "std")]
pub use crate::bstring::BString;
pub use crate::ext_slice::{
    ByteSlice, Bytes, Fields, FieldsWith, Find, FindReverse, Finder,
    FinderReverse, Lines, LinesWithTerminator, Split, SplitN, SplitNReverse,
    SplitReverse, B,
};
#[cfg(feature = "std")]
pub use crate::ext_vec::{concat, join, ByteVec, DrainBytes, FromUtf8Error};
#[cfg(feature = "unicode")]
pub use crate::unicode::{
    GraphemeIndices, Graphemes, SentenceIndices, Sentences, WordIndices,
    Words, WordsWithBreakIndices, WordsWithBreaks,
};
pub use crate::utf8::{
    decode as decode_utf8, decode_last as decode_last_utf8, CharIndices,
    Chars, Utf8Chunk, Utf8Chunks, Utf8Error,
};

mod ascii;
mod bstr;
#[cfg(feature = "std")]
mod bstring;
mod byteset;
mod ext_slice;
#[cfg(feature = "std")]
mod ext_vec;
mod impls;
#[cfg(feature = "std")]
pub mod io;
#[cfg(test)]
mod tests;
#[cfg(feature = "unicode")]
mod unicode;
mod utf8;

#[cfg(test)]
mod apitests {
    use crate::bstr::BStr;
    use crate::bstring::BString;
    use crate::ext_slice::{Finder, FinderReverse};

    #[test]
    fn oibits() {
        use std::panic::{RefUnwindSafe, UnwindSafe};

        fn assert_send<T: Send>() {}
        fn assert_sync<T: Sync>() {}
        fn assert_unwind_safe<T: RefUnwindSafe + UnwindSafe>() {}

        assert_send::<&BStr>();
        assert_sync::<&BStr>();
        assert_unwind_safe::<&BStr>();
        assert_send::<BString>();
        assert_sync::<BString>();
        assert_unwind_safe::<BString>();
        assert_send::<Finder<'_>>();
        assert_sync::<Finder<'_>>();
        assert_unwind_safe::<Finder<'_>>();
        assert_send::<FinderReverse<'_>>();
        assert_sync::<FinderReverse<'_>>();
        assert_unwind_safe::<FinderReverse<'_>>();
    }
}
bstr-0.2.17/src/tests.rs000064400000000000000000000026440072674642500132210ustar 00000000000000
/// A sequence of tests for checking whether lossy decoding uses the maximal
/// subpart strategy correctly. Namely, if a sequence of otherwise invalid
/// UTF-8 bytes is a valid prefix of a valid UTF-8 sequence, then the entire
/// prefix is replaced by a single replacement codepoint. In all other cases,
/// each invalid byte is replaced by a single replacement codepoint.
///
/// The first element in each tuple is the expected result of lossy decoding,
/// while the second element is the input given.
pub const LOSSY_TESTS: &[(&str, &[u8])] = &[ ("a", b"a"), ("\u{FFFD}", b"\xFF"), ("\u{FFFD}\u{FFFD}", b"\xFF\xFF"), ("β\u{FFFD}", b"\xCE\xB2\xFF"), ("☃\u{FFFD}", b"\xE2\x98\x83\xFF"), ("𝝱\u{FFFD}", b"\xF0\x9D\x9D\xB1\xFF"), ("\u{FFFD}\u{FFFD}", b"\xCE\xF0"), ("\u{FFFD}\u{FFFD}", b"\xCE\xFF"), ("\u{FFFD}\u{FFFD}", b"\xE2\x98\xF0"), ("\u{FFFD}\u{FFFD}", b"\xE2\x98\xFF"), ("\u{FFFD}", b"\xF0\x9D\x9D"), ("\u{FFFD}\u{FFFD}", b"\xF0\x9D\x9D\xF0"), ("\u{FFFD}\u{FFFD}", b"\xF0\x9D\x9D\xFF"), ("\u{FFFD}", b"\xCE"), ("a\u{FFFD}", b"a\xCE"), ("\u{FFFD}", b"\xE2\x98"), ("a\u{FFFD}", b"a\xE2\x98"), ("\u{FFFD}", b"\xF0\x9D\x9C"), ("a\u{FFFD}", b"a\xF0\x9D\x9C"), ("a\u{FFFD}\u{FFFD}\u{FFFD}z", b"a\xED\xA0\x80z"), ("☃βツ\u{FFFD}", b"\xe2\x98\x83\xce\xb2\xe3\x83\x84\xFF"), ("a\u{FFFD}\u{FFFD}\u{FFFD}b", b"\x61\xF1\x80\x80\xE1\x80\xC2\x62"), ]; bstr-0.2.17/src/unicode/data/GraphemeBreakTest.txt000064400000000000000000002433510072674642500201510ustar 00000000000000# GraphemeBreakTest-12.1.0.txt # Date: 2019-03-10, 10:53:12 GMT # © 2019 Unicode®, Inc. # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. # For terms of use, see http://www.unicode.org/terms_of_use.html # # Unicode Character Database # For documentation, see http://www.unicode.org/reports/tr44/ # # Default Grapheme_Cluster_Break Test # # Format: # (# )? # contains hex Unicode code points, with # ÷ wherever there is a break opportunity, and # × wherever there is not. # the format can change, but currently it shows: # - the sample character name # - (x) the Grapheme_Cluster_Break property value for the sample character # - [x] the rule that determines whether there is a break or not, # as listed in the Rules section of GraphemeBreakTest.html # # These samples may be extended or changed in the future. 
# ÷ 0020 ÷ 0020 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0020 × 0308 ÷ 0020 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0020 ÷ 000D ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (CR) ÷ [0.3] ÷ 0020 × 0308 ÷ 000D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0020 ÷ 000A ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (LF) ÷ [0.3] ÷ 0020 × 0308 ÷ 000A ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0020 ÷ 0001 ÷ # ÷ [0.2] SPACE (Other) ÷ [5.0] (Control) ÷ [0.3] ÷ 0020 × 0308 ÷ 0001 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0020 × 034F ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0020 × 0308 × 034F ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0020 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0020 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0020 ÷ 0600 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0020 × 0308 ÷ 0600 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0020 × 0903 ÷ # ÷ [0.2] SPACE (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0020 × 0308 × 0903 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0020 ÷ 1100 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0020 × 0308 ÷ 1100 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0020 ÷ 1160 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0020 × 0308 ÷ 1160 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0020 ÷ 11A8 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0020 × 0308 ÷ 11A8 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0020 ÷ AC00 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0020 × 0308 ÷ AC00 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0020 ÷ AC01 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0020 × 0308 ÷ AC01 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0020 ÷ 231A ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0020 × 0308 ÷ 231A ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0020 × 0300 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0020 × 0308 × 0300 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0020 × 200D ÷ # ÷ [0.2] SPACE (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0020 × 0308 × 200D ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0020 ÷ 0378 ÷ # ÷ [0.2] SPACE (Other) ÷ [999.0] 
(Other) ÷ [0.3] ÷ 0020 × 0308 ÷ 0378 ÷ # ÷ [0.2] SPACE (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 000D ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [4.0] SPACE (Other) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] (CR) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Control) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 000D ÷ 034F ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 000D ÷ 0308 × 034F ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 000D ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000D ÷ 0600 ÷ # ÷ [0.2] (CR) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 000D ÷ 0903 ÷ # ÷ [0.2] (CR) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 000D ÷ 0308 × 0903 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 000D ÷ 1100 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 000D ÷ 1160 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 000D ÷ 11A8 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 000D ÷ AC00 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 000D ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 000D ÷ AC01 ÷ # ÷ [0.2] (CR) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 000D ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 000D ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [4.0] WATCH (ExtPict) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 000D ÷ 200D ÷ # ÷ [0.2] (CR) ÷ [4.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 000D ÷ 0308 × 200D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 000D ÷ 0378 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Other) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 000A ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [4.0] SPACE (Other) ÷ [0.3] ÷ 
000A ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] (CR) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] (LF) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Control) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 000A ÷ 034F ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 000A ÷ 0308 × 034F ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 000A ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000A ÷ 0600 ÷ # ÷ [0.2] (LF) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 000A ÷ 0903 ÷ # ÷ [0.2] (LF) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 000A ÷ 0308 × 0903 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 000A ÷ 1100 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 000A ÷ 1160 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 000A ÷ 11A8 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 000A ÷ AC00 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 000A ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 000A ÷ AC01 ÷ # ÷ [0.2] (LF) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 000A ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 000A ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [4.0] WATCH (ExtPict) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 000A ÷ 200D ÷ # ÷ [0.2] (LF) ÷ [4.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 000A ÷ 0308 × 200D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 000A ÷ 0378 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Other) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 0001 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] SPACE (Other) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0001 ÷ 000D ÷ # ÷ [0.2] (Control) ÷ [4.0] (CR) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 
000D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0001 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] (LF) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0001 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Control) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0001 ÷ 034F ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0001 ÷ 0308 × 034F ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0001 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0001 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 0600 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0001 ÷ 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0001 ÷ 0308 × 0903 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0001 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 1100 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0001 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 1160 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0001 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 11A8 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0001 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ AC00 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0001 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ AC01 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0001 ÷ 231A ÷ # ÷ [0.2] (Control) ÷ [4.0] WATCH (ExtPict) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0001 ÷ 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0001 ÷ 0308 × 0300 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0001 ÷ 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0001 ÷ 0308 × 200D ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0001 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] (Other) ÷ [0.3] ÷ 0001 ÷ 0308 ÷ 0378 ÷ # ÷ [0.2] (Control) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 034F ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 034F × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 034F ÷ 000D ÷ # ÷ [0.2] COMBINING GRAPHEME 
JOINER (Extend) ÷ [5.0] (CR) ÷ [0.3] ÷ 034F × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 034F ÷ 000A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [5.0] (LF) ÷ [0.3] ÷ 034F × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 034F ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [5.0] (Control) ÷ [0.3] ÷ 034F × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 034F × 034F ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 034F × 0308 × 034F ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 034F ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 034F × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 034F ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 034F × 0308 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 034F × 0903 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 034F × 0308 × 0903 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 034F ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 034F × 0308 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 034F ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 034F × 0308 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 034F ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 034F × 0308 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 034F ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 034F × 0308 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 034F ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 034F × 0308 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 034F ÷ 231A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 034F × 0308 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 034F × 0300 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 034F × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × 
[9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 034F × 200D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 034F × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 034F ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) ÷ [999.0] (Other) ÷ [0.3] ÷ 034F × 0308 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAPHEME JOINER (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 1F1E6 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1F1E6 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (CR) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 1F1E6 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (LF) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 1F1E6 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [5.0] (Control) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 1F1E6 × 034F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 1F1E6 × 0308 × 034F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 1F1E6 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [12.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1F1E6 ÷ 0600 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0600 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 1F1E6 × 0903 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 1F1E6 × 0308 × 0903 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 1F1E6 ÷ 1100 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 1100 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1F1E6 ÷ 1160 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 1160 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 1F1E6 ÷ 11A8 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 11A8 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 1F1E6 ÷ AC00 ÷ # ÷ [0.2] 
REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ AC00 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 1F1E6 ÷ AC01 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ AC01 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 1F1E6 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1F1E6 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 1F1E6 × 0308 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 1F1E6 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 1F1E6 × 0308 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 1F1E6 ÷ 0378 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] (Other) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0378 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 0600 × 0020 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] SPACE (Other) ÷ [0.3] ÷ 0600 × 0308 ÷ 0020 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0600 ÷ 000D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (CR) ÷ [0.3] ÷ 0600 × 0308 ÷ 000D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0600 ÷ 000A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (LF) ÷ [0.3] ÷ 0600 × 0308 ÷ 000A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0600 ÷ 0001 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) ÷ [5.0] (Control) ÷ [0.3] ÷ 0600 × 0308 ÷ 0001 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0600 × 034F ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0600 × 0308 × 034F ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0600 × 1F1E6 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0600 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0600 × 0600 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0600 × 0308 ÷ 0600 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0600 × 0903 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0600 × 0308 × 0903 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0600 × 1100 ÷ # 
÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0600 × 0308 ÷ 1100 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0600 × 1160 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0600 × 0308 ÷ 1160 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0600 × 11A8 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0600 × 0308 ÷ 11A8 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0600 × AC00 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0600 × 0308 ÷ AC00 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0600 × AC01 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0600 × 0308 ÷ AC01 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0600 × 231A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] WATCH (ExtPict) ÷ [0.3] ÷ 0600 × 0308 ÷ 231A ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0600 × 0300 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0600 × 0308 × 0300 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0600 × 200D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0600 × 0308 × 200D ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0600 × 0378 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.2] (Other) ÷ [0.3] ÷ 0600 × 0308 ÷ 0378 ÷ # ÷ [0.2] ARABIC NUMBER SIGN (Prepend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 0903 ÷ 0020 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0903 × 0308 ÷ 0020 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0903 ÷ 000D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (CR) ÷ [0.3] ÷ 0903 × 0308 ÷ 000D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0903 ÷ 000A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (LF) ÷ [0.3] ÷ 0903 × 0308 ÷ 000A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0903 ÷ 0001 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [5.0] (Control) ÷ [0.3] ÷ 0903 × 0308 ÷ 0001 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0903 × 034F ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0903 × 0308 × 034F ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0903 ÷ 1F1E6 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ 
[0.3] ÷ 0903 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0903 ÷ 0600 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0903 × 0308 ÷ 0600 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0903 × 0903 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0903 × 0308 × 0903 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0903 ÷ 1100 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0903 × 0308 ÷ 1100 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0903 ÷ 1160 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0903 × 0308 ÷ 1160 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0903 ÷ 11A8 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0903 × 0308 ÷ 11A8 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0903 ÷ AC00 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0903 × 0308 ÷ AC00 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0903 ÷ AC01 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0903 × 0308 ÷ AC01 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0903 ÷ 231A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0903 × 0308 ÷ 231A ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0903 × 0300 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0903 × 0308 × 0300 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0903 × 200D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0903 × 0308 × 200D ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0903 ÷ 0378 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] (Other) ÷ [0.3] ÷ 0903 × 0308 ÷ 0378 ÷ # ÷ [0.2] DEVANAGARI SIGN VISARGA (SpacingMark) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 1100 ÷ 0020 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1100 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1100 ÷ 000D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (CR) ÷ [0.3] ÷ 1100 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS 
(Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 1100 ÷ 000A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (LF) ÷ [0.3] ÷ 1100 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 1100 ÷ 0001 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [5.0] (Control) ÷ [0.3] ÷ 1100 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 1100 × 034F ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 1100 × 0308 × 034F ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 1100 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1100 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1100 ÷ 0600 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 1100 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 1100 × 0903 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 1100 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 1100 × 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1100 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1100 × 1160 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 1100 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 1100 ÷ 11A8 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 1100 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 1100 × AC00 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 1100 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 1100 × AC01 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 1100 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 1100 ÷ 231A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1100 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1100 × 0300 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 1100 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 1100 × 200D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 1100 × 0308 × 200D ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 1100 ÷ 0378 ÷ # ÷ [0.2] HANGUL 
CHOSEONG KIYEOK (L) ÷ [999.0] (Other) ÷ [0.3] ÷ 1100 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 1160 ÷ 0020 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1160 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1160 ÷ 000D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (CR) ÷ [0.3] ÷ 1160 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 1160 ÷ 000A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (LF) ÷ [0.3] ÷ 1160 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 1160 ÷ 0001 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [5.0] (Control) ÷ [0.3] ÷ 1160 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 1160 × 034F ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 1160 × 0308 × 034F ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 1160 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1160 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1160 ÷ 0600 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 1160 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 1160 × 0903 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 1160 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 1160 ÷ 1100 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1160 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1160 × 1160 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [7.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 1160 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 1160 × 11A8 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 1160 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 1160 ÷ AC00 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 1160 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 1160 ÷ AC01 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 1160 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 1160 ÷ 231A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1160 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] 
WATCH (ExtPict) ÷ [0.3] ÷ 1160 × 0300 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 1160 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 1160 × 200D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 1160 × 0308 × 200D ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 1160 ÷ 0378 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) ÷ [999.0] (Other) ÷ [0.3] ÷ 1160 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL JUNGSEONG FILLER (V) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 11A8 ÷ 0020 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 11A8 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 11A8 ÷ 000D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (CR) ÷ [0.3] ÷ 11A8 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 11A8 ÷ 000A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (LF) ÷ [0.3] ÷ 11A8 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 11A8 ÷ 0001 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [5.0] (Control) ÷ [0.3] ÷ 11A8 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 11A8 × 034F ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 11A8 × 0308 × 034F ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 11A8 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 11A8 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 11A8 ÷ 0600 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 11A8 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 11A8 × 0903 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 11A8 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 11A8 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 11A8 ÷ 1160 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 11A8 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 11A8 × 11A8 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 11A8 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 11A8 ÷ AC00 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 
11A8 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 11A8 ÷ AC01 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 11A8 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 11A8 ÷ 231A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 11A8 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 11A8 × 0300 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 11A8 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 11A8 × 200D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 11A8 × 0308 × 200D ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 11A8 ÷ 0378 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] (Other) ÷ [0.3] ÷ 11A8 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL JONGSEONG KIYEOK (T) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ AC00 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ AC00 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ AC00 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (CR) ÷ [0.3] ÷ AC00 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ AC00 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (LF) ÷ [0.3] ÷ AC00 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ AC00 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [5.0] (Control) ÷ [0.3] ÷ AC00 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ AC00 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ AC00 × 0308 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ AC00 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ AC00 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ AC00 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ AC00 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ AC00 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ AC00 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ AC00 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ AC00 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ AC00 × 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JUNGSEONG 
FILLER (V) ÷ [0.3] ÷ AC00 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ AC00 × 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ AC00 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ AC00 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ AC00 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ AC00 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ AC00 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ AC00 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ AC00 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ AC00 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ AC00 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ AC00 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ AC00 × 0308 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ AC00 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) ÷ [999.0] (Other) ÷ [0.3] ÷ AC00 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ AC01 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ AC01 × 0308 ÷ 0020 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ AC01 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (CR) ÷ [0.3] ÷ AC01 × 0308 ÷ 000D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ AC01 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (LF) ÷ [0.3] ÷ AC01 × 0308 ÷ 000A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ AC01 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [5.0] (Control) ÷ [0.3] ÷ AC01 × 0308 ÷ 0001 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ AC01 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ AC01 × 0308 × 034F ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ AC01 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ AC01 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ AC01 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ AC01 × 0308 ÷ 0600 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ AC01 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.1] DEVANAGARI SIGN VISARGA 
(SpacingMark) ÷ [0.3] ÷ AC01 × 0308 × 0903 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ AC01 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ AC01 × 0308 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ AC01 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ AC01 × 0308 ÷ 1160 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ AC01 × 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ AC01 × 0308 ÷ 11A8 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ AC01 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ AC01 × 0308 ÷ AC00 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ AC01 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ AC01 × 0308 ÷ AC01 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ AC01 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ AC01 × 0308 ÷ 231A ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ AC01 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ AC01 × 0308 × 0300 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ AC01 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ AC01 × 0308 × 200D ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ AC01 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) ÷ [999.0] (Other) ÷ [0.3] ÷ AC01 × 0308 ÷ 0378 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 231A ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 231A × 0308 ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 231A ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [5.0] (CR) ÷ [0.3] ÷ 231A × 0308 ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 231A ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [5.0] (LF) ÷ [0.3] ÷ 231A × 0308 ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 231A ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [5.0] (Control) ÷ [0.3] ÷ 231A × 0308 ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 231A × 034F ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 231A × 0308 × 034F ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 231A ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 231A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS 
(Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 231A ÷ 0600 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 231A × 0308 ÷ 0600 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 231A × 0903 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 231A × 0308 × 0903 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 231A ÷ 1100 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 231A × 0308 ÷ 1100 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 231A ÷ 1160 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 231A × 0308 ÷ 1160 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 231A ÷ 11A8 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 231A × 0308 ÷ 11A8 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 231A ÷ AC00 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 231A × 0308 ÷ AC00 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 231A ÷ AC01 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 231A × 0308 ÷ AC01 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 231A ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 231A × 0308 ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 231A × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 231A × 0308 × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 231A × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 231A × 0308 × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 231A ÷ 0378 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] (Other) ÷ [0.3] ÷ 231A × 0308 ÷ 0378 ÷ # ÷ [0.2] WATCH (ExtPict) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 0300 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0300 × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0300 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0300 × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0300 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0300 × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0300 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0300 × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0300 × 034F ÷ 
# ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0300 × 0308 × 034F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0300 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0300 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0300 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0300 × 0308 ÷ 0600 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0300 × 0903 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0300 × 0308 × 0903 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0300 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0300 × 0308 ÷ 1100 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0300 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0300 × 0308 ÷ 1160 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0300 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0300 × 0308 ÷ 11A8 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0300 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0300 × 0308 ÷ AC00 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0300 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0300 × 0308 ÷ AC01 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0300 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0300 × 0308 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0300 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0300 × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0300 ÷ 0378 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 0300 × 0308 ÷ 0378 ÷ # ÷ [0.2] COMBINING 
GRAVE ACCENT (Extend_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 200D ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 200D × 0308 ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 200D ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 200D × 0308 ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 200D ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 200D × 0308 ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 200D ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 200D × 0308 ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 200D × 034F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 200D × 0308 × 034F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 200D ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 200D × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 200D ÷ 0600 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 200D × 0308 ÷ 0600 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 200D × 0903 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 200D × 0308 × 0903 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 200D ÷ 1100 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 200D × 0308 ÷ 1100 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 200D ÷ 1160 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 200D × 0308 ÷ 1160 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 200D ÷ 11A8 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 200D × 0308 ÷ 11A8 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 200D ÷ AC00 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 200D × 0308 ÷ AC00 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 200D ÷ AC01 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 200D × 0308 ÷ AC01 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 200D ÷ 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 200D × 0308 ÷ 231A ÷ # 
÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 200D × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 200D × 0308 × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 200D × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 200D × 0308 × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 200D ÷ 0378 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 200D × 0308 ÷ 0378 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 0378 ÷ 0020 ÷ # ÷ [0.2] (Other) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0378 × 0308 ÷ 0020 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 0378 ÷ 000D ÷ # ÷ [0.2] (Other) ÷ [5.0] (CR) ÷ [0.3] ÷ 0378 × 0308 ÷ 000D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (CR) ÷ [0.3] ÷ 0378 ÷ 000A ÷ # ÷ [0.2] (Other) ÷ [5.0] (LF) ÷ [0.3] ÷ 0378 × 0308 ÷ 000A ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (LF) ÷ [0.3] ÷ 0378 ÷ 0001 ÷ # ÷ [0.2] (Other) ÷ [5.0] (Control) ÷ [0.3] ÷ 0378 × 0308 ÷ 0001 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [5.0] (Control) ÷ [0.3] ÷ 0378 × 034F ÷ # ÷ [0.2] (Other) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0378 × 0308 × 034F ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAPHEME JOINER (Extend) ÷ [0.3] ÷ 0378 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0378 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0378 ÷ 0600 ÷ # ÷ [0.2] (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0378 × 0308 ÷ 0600 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) ÷ [0.3] ÷ 0378 × 0903 ÷ # ÷ [0.2] (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0378 × 0308 × 0903 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [0.3] ÷ 0378 ÷ 1100 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0378 × 0308 ÷ 1100 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 0378 ÷ 1160 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0378 × 0308 ÷ 1160 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JUNGSEONG FILLER (V) ÷ [0.3] ÷ 0378 ÷ 11A8 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0378 × 0308 ÷ 11A8 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL JONGSEONG KIYEOK (T) ÷ [0.3] ÷ 0378 ÷ AC00 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0378 × 0308 ÷ AC00 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GA (LV) ÷ [0.3] ÷ 0378 ÷ AC01 ÷ # ÷ [0.2] (Other) ÷ [999.0] HANGUL SYLLABLE GAG (LVT) ÷ [0.3] ÷ 0378 × 0308 ÷ AC01 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] HANGUL SYLLABLE GAG 
(LVT) ÷ [0.3] ÷ 0378 ÷ 231A ÷ # ÷ [0.2] (Other) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0378 × 0308 ÷ 231A ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0378 × 0300 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0378 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] COMBINING GRAVE ACCENT (Extend_ExtCccZwj) ÷ [0.3] ÷ 0378 × 200D ÷ # ÷ [0.2] (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0378 × 0308 × 200D ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0378 ÷ 0378 ÷ # ÷ [0.2] (Other) ÷ [999.0] (Other) ÷ [0.3] ÷ 0378 × 0308 ÷ 0378 ÷ # ÷ [0.2] (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] (Other) ÷ [0.3] ÷ 000D × 000A ÷ 0061 ÷ 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [4.0] LATIN SMALL LETTER A (Other) ÷ [5.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [0.3] ÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [0.3] ÷ 0020 × 200D ÷ 0646 ÷ # ÷ [0.2] SPACE (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] ARABIC LETTER NOON (Other) ÷ [0.3] ÷ 0646 × 200D ÷ 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] SPACE (Other) ÷ [0.3] ÷ 1100 × 1100 ÷ # ÷ [0.2] HANGUL CHOSEONG KIYEOK (L) × [6.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ AC00 × 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GA (LV) × [7.0] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ AC01 × 11A8 ÷ 1100 ÷ # ÷ [0.2] HANGUL SYLLABLE GAG (LVT) × [8.0] HANGUL JONGSEONG KIYEOK (T) ÷ [999.0] HANGUL CHOSEONG KIYEOK (L) ÷ [0.3] ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [12.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 1F1E7 × 200D ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 200D ÷ 1F1E7 × 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 × 1F1E9 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) × [13.0] REGIONAL INDICATOR SYMBOL LETTER D (RI) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [0.3] ÷ 0061 × 0308 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 × 0903 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL 
LETTER A (Other) × [9.1] DEVANAGARI SIGN VISARGA (SpacingMark) ÷ [999.0] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 0061 ÷ 0600 × 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) ÷ [999.0] ARABIC NUMBER SIGN (Prepend) × [9.2] LATIN SMALL LETTER B (Other) ÷ [0.3] ÷ 1F476 × 1F3FF ÷ 1F476 ÷ # ÷ [0.2] BABY (ExtPict) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [999.0] BABY (ExtPict) ÷ [0.3] ÷ 0061 × 1F3FF ÷ 1F476 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [999.0] BABY (ExtPict) ÷ [0.3] ÷ 0061 × 1F3FF ÷ 1F476 × 200D × 1F6D1 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [999.0] BABY (ExtPict) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 1F476 × 1F3FF × 0308 × 200D × 1F476 × 1F3FF ÷ # ÷ [0.2] BABY (ExtPict) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) × [9.0] COMBINING DIAERESIS (Extend_ExtCccZwj) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] BABY (ExtPict) × [9.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend) ÷ [0.3] ÷ 1F6D1 × 200D × 1F6D1 ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 0061 × 200D ÷ 1F6D1 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 2701 × 200D × 2701 ÷ # ÷ [0.2] UPPER BLADE SCISSORS (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) × [11.0] UPPER BLADE SCISSORS (Other) ÷ [0.3] ÷ 0061 × 200D ÷ 2701 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Other) × [9.0] ZERO WIDTH JOINER (ZWJ_ExtCccZwj) ÷ [999.0] UPPER BLADE SCISSORS (Other) ÷ [0.3] # # Lines: 602 # # EOF bstr-0.2.17/src/unicode/data/LICENSE-UNICODE000064400000000000000000000043370072674642500161430ustar 00000000000000UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE See Terms of Use for definitions of Unicode Inc.'s Data Files and Software. NOTICE TO USER: Carefully read the following legal agreement. BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"), YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. COPYRIGHT AND PERMISSION NOTICE Copyright © 1991-2019 Unicode, Inc. All rights reserved. Distributed under the Terms of Use in https://www.unicode.org/copyright.html. Permission is hereby granted, free of charge, to any person obtaining a copy of the Unicode data files and any associated documentation (the "Data Files") or Unicode software and any associated documentation (the "Software") to deal in the Data Files or Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Data Files or Software, and to permit persons to whom the Data Files or Software are furnished to do so, provided that either (a) this copyright and permission notice appear with all copies of the Data Files or Software, or (b) this copyright and permission notice appear in associated Documentation. THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA FILES OR SOFTWARE. Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in these Data Files or Software without prior written authorization of the copyright holder. bstr-0.2.17/src/unicode/data/SentenceBreakTest.txt000064400000000000000000002530700072674642500201640ustar 00000000000000# SentenceBreakTest-12.1.0.txt # Date: 2019-03-10, 10:53:28 GMT # © 2019 Unicode®, Inc. # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. # For terms of use, see http://www.unicode.org/terms_of_use.html # # Unicode Character Database # For documentation, see http://www.unicode.org/reports/tr44/ # # Default Sentence_Break Test # # Format: # (# )? # contains hex Unicode code points, with # ÷ wherever there is a break opportunity, and # × wherever there is not. # the format can change, but currently it shows: # - the sample character name # - (x) the Sentence_Break property value for the sample character # - [x] the rule that determines whether there is a break or not, # as listed in the Rules section of SentenceBreakTest.html # # These samples may be extended or changed in the future. # ÷ 0001 × 0001 ÷ # ÷ [0.2] (Other) × [998.0] (Other) ÷ [0.3] ÷ 0001 × 0308 × 0001 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0001 × 000D ÷ # ÷ [0.2] (Other) × [998.0] (CR) ÷ [0.3] ÷ 0001 × 0308 × 000D ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0001 × 000A ÷ # ÷ [0.2] (Other) × [998.0] (LF) ÷ [0.3] ÷ 0001 × 0308 × 000A ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0001 × 0085 ÷ # ÷ [0.2] (Other) × [998.0] (Sep) ÷ [0.3] ÷ 0001 × 0308 × 0085 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0001 × 0009 ÷ # ÷ [0.2] (Other) × [998.0] (Sp) ÷ [0.3] ÷ 0001 × 0308 × 0009 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0001 × 0061 ÷ # ÷ [0.2] (Other) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0001 × 0308 × 0061 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0001 × 0041 ÷ # ÷ [0.2] (Other) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0001 × 0308 × 0041 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0001 × 01BB ÷ # ÷ [0.2] (Other) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0001 × 0308 × 01BB ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0001 × 0030 ÷ # ÷ [0.2] (Other) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0001 × 0308 × 0030 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0001 × 002E ÷ # ÷ [0.2] (Other) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0001 × 0308 × 002E ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0001 × 0021 ÷ # ÷ [0.2] (Other) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0001 × 0308 × 0021 ÷ # ÷ 
[0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0001 × 0022 ÷ # ÷ [0.2] (Other) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0001 × 0308 × 0022 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0001 × 002C ÷ # ÷ [0.2] (Other) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0001 × 0308 × 002C ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0001 × 00AD ÷ # ÷ [0.2] (Other) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0001 × 0308 × 00AD ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0001 × 0300 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0001 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Other) ÷ [0.3] ÷ 000D ÷ 0308 × 0001 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] (CR) ÷ [0.3] ÷ 000D ÷ 0308 × 000D ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] ÷ 000D ÷ 0308 × 000A ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 000D ÷ 0085 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Sep) ÷ [0.3] ÷ 000D ÷ 0308 × 0085 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 000D ÷ 0009 ÷ # ÷ [0.2] (CR) ÷ [4.0] (Sp) ÷ [0.3] ÷ 000D ÷ 0308 × 0009 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 000D ÷ 0061 ÷ # ÷ [0.2] (CR) ÷ [4.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 000D ÷ 0308 × 0061 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 000D ÷ 0041 ÷ # ÷ [0.2] (CR) ÷ [4.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 000D ÷ 0308 × 0041 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 000D ÷ 01BB ÷ # ÷ [0.2] (CR) ÷ [4.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 000D ÷ 0308 × 01BB ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 000D ÷ 0030 ÷ # ÷ [0.2] (CR) ÷ [4.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000D ÷ 0308 × 0030 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000D ÷ 002E ÷ # ÷ [0.2] (CR) ÷ [4.0] FULL STOP (ATerm) ÷ [0.3] ÷ 000D ÷ 0308 × 002E ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 000D ÷ 0021 ÷ # ÷ [0.2] (CR) ÷ [4.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 000D ÷ 0308 × 0021 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 000D ÷ 0022 ÷ # ÷ [0.2] (CR) ÷ [4.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 000D ÷ 0308 × 0022 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 000D ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [4.0] COMMA (SContinue) ÷ [0.3] ÷ 000D ÷ 0308 × 002C ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 000D ÷ 00AD ÷ # ÷ [0.2] (CR) ÷ [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000D ÷ 0308 × 00AD ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [4.0] COMBINING DIAERESIS 
(Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Other) ÷ [0.3] ÷ 000A ÷ 0308 × 0001 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] (CR) ÷ [0.3] ÷ 000A ÷ 0308 × 000D ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] (LF) ÷ [0.3] ÷ 000A ÷ 0308 × 000A ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 000A ÷ 0085 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Sep) ÷ [0.3] ÷ 000A ÷ 0308 × 0085 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 000A ÷ 0009 ÷ # ÷ [0.2] (LF) ÷ [4.0] (Sp) ÷ [0.3] ÷ 000A ÷ 0308 × 0009 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 000A ÷ 0061 ÷ # ÷ [0.2] (LF) ÷ [4.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 000A ÷ 0308 × 0061 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 000A ÷ 0041 ÷ # ÷ [0.2] (LF) ÷ [4.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 000A ÷ 0308 × 0041 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 000A ÷ 01BB ÷ # ÷ [0.2] (LF) ÷ [4.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 000A ÷ 0308 × 01BB ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 000A ÷ 0030 ÷ # ÷ [0.2] (LF) ÷ [4.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000A ÷ 0308 × 0030 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000A ÷ 002E ÷ # ÷ [0.2] (LF) ÷ [4.0] FULL STOP (ATerm) ÷ [0.3] ÷ 000A ÷ 0308 × 002E ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 000A ÷ 0021 ÷ # ÷ [0.2] (LF) ÷ [4.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 000A ÷ 0308 × 0021 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 000A ÷ 0022 ÷ # ÷ [0.2] (LF) ÷ [4.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 000A ÷ 0308 × 0022 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 000A ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [4.0] COMMA (SContinue) ÷ [0.3] ÷ 000A ÷ 0308 × 002C ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 000A ÷ 00AD ÷ # ÷ [0.2] (LF) ÷ [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000A ÷ 0308 × 00AD ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0085 ÷ 0001 ÷ # ÷ [0.2] (Sep) ÷ [4.0] (Other) ÷ [0.3] ÷ 0085 ÷ 0308 × 0001 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0085 ÷ 000D ÷ # ÷ [0.2] (Sep) ÷ [4.0] (CR) ÷ [0.3] ÷ 0085 ÷ 0308 × 000D ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0085 ÷ 000A ÷ # ÷ [0.2] (Sep) ÷ [4.0] (LF) ÷ [0.3] ÷ 0085 ÷ 0308 × 000A ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0085 ÷ 0085 ÷ # ÷ [0.2] (Sep) ÷ [4.0] (Sep) ÷ [0.3] ÷ 0085 ÷ 0308 × 0085 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0085 ÷ 0009 ÷ # ÷ [0.2] (Sep) ÷ [4.0] (Sp) ÷ [0.3] ÷ 0085 ÷ 0308 × 0009 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × 
[998.0] (Sp) ÷ [0.3] ÷ 0085 ÷ 0061 ÷ # ÷ [0.2] (Sep) ÷ [4.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0085 ÷ 0308 × 0061 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0085 ÷ 0041 ÷ # ÷ [0.2] (Sep) ÷ [4.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0085 ÷ 0308 × 0041 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0085 ÷ 01BB ÷ # ÷ [0.2] (Sep) ÷ [4.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0085 ÷ 0308 × 01BB ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0085 ÷ 0030 ÷ # ÷ [0.2] (Sep) ÷ [4.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0085 ÷ 0308 × 0030 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0085 ÷ 002E ÷ # ÷ [0.2] (Sep) ÷ [4.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0085 ÷ 0308 × 002E ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0085 ÷ 0021 ÷ # ÷ [0.2] (Sep) ÷ [4.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0085 ÷ 0308 × 0021 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0085 ÷ 0022 ÷ # ÷ [0.2] (Sep) ÷ [4.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0085 ÷ 0308 × 0022 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0085 ÷ 002C ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMMA (SContinue) ÷ [0.3] ÷ 0085 ÷ 0308 × 002C ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0085 ÷ 00AD ÷ # ÷ [0.2] (Sep) ÷ [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0085 ÷ 0308 × 00AD ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0085 ÷ 0300 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0085 ÷ 0308 × 0300 ÷ # ÷ [0.2] (Sep) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0009 × 0001 ÷ # ÷ [0.2] (Sp) × [998.0] (Other) ÷ [0.3] ÷ 0009 × 0308 × 0001 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0009 × 000D ÷ # ÷ [0.2] (Sp) × [998.0] (CR) ÷ [0.3] ÷ 0009 × 0308 × 000D ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0009 × 000A ÷ # ÷ [0.2] (Sp) × [998.0] (LF) ÷ [0.3] ÷ 0009 × 0308 × 000A ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0009 × 0085 ÷ # ÷ [0.2] (Sp) × [998.0] (Sep) ÷ [0.3] ÷ 0009 × 0308 × 0085 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0009 × 0009 ÷ # ÷ [0.2] (Sp) × [998.0] (Sp) ÷ [0.3] ÷ 0009 × 0308 × 0009 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0009 × 0061 ÷ # ÷ [0.2] (Sp) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0009 × 0308 × 0061 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0009 × 0041 ÷ # ÷ [0.2] (Sp) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0009 × 0308 × 0041 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0009 × 01BB ÷ # ÷ [0.2] (Sp) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0009 × 0308 × 01BB ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0009 × 0030 ÷ # ÷ [0.2] (Sp) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0009 × 0308 × 0030 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS 
(Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0009 × 002E ÷ # ÷ [0.2] (Sp) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0009 × 0308 × 002E ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0009 × 0021 ÷ # ÷ [0.2] (Sp) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0009 × 0308 × 0021 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0009 × 0022 ÷ # ÷ [0.2] (Sp) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0009 × 0308 × 0022 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0009 × 002C ÷ # ÷ [0.2] (Sp) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0009 × 0308 × 002C ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0009 × 00AD ÷ # ÷ [0.2] (Sp) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0009 × 0308 × 00AD ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0009 × 0300 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0009 × 0308 × 0300 ÷ # ÷ [0.2] (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 × 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (Other) ÷ [0.3] ÷ 0061 × 0308 × 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0061 × 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (CR) ÷ [0.3] ÷ 0061 × 0308 × 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0061 × 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (LF) ÷ [0.3] ÷ 0061 × 0308 × 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0061 × 0085 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (Sep) ÷ [0.3] ÷ 0061 × 0308 × 0085 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0061 × 0009 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] (Sp) ÷ [0.3] ÷ 0061 × 0308 × 0009 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0061 × 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0061 × 0308 × 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0061 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0061 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0061 × 01BB ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0061 × 0308 × 01BB ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0061 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 × 0308 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 × 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0061 × 0308 × 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0061 × 0021 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] EXCLAMATION MARK (STerm) ÷ 
[0.3] ÷ 0061 × 0308 × 0021 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0061 × 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0061 × 0308 × 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0061 × 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0061 × 0308 × 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0061 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0041 × 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (Other) ÷ [0.3] ÷ 0041 × 0308 × 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0041 × 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (CR) ÷ [0.3] ÷ 0041 × 0308 × 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0041 × 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (LF) ÷ [0.3] ÷ 0041 × 0308 × 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0041 × 0085 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (Sep) ÷ [0.3] ÷ 0041 × 0308 × 0085 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0041 × 0009 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] (Sp) ÷ [0.3] ÷ 0041 × 0308 × 0009 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0041 × 0061 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0041 × 0308 × 0061 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0041 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0041 × 0308 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0041 × 01BB ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0041 × 0308 × 01BB ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0041 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0041 × 0308 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0041 × 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0041 × 0308 × 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0041 × 0021 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0041 × 0308 × 0021 ÷ # ÷ [0.2] 
LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0041 × 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0041 × 0308 × 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0041 × 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0041 × 0308 × 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0041 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0041 × 0308 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0041 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0041 × 0308 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 01BB × 0001 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (Other) ÷ [0.3] ÷ 01BB × 0308 × 0001 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 01BB × 000D ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (CR) ÷ [0.3] ÷ 01BB × 0308 × 000D ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 01BB × 000A ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (LF) ÷ [0.3] ÷ 01BB × 0308 × 000A ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 01BB × 0085 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (Sep) ÷ [0.3] ÷ 01BB × 0308 × 0085 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 01BB × 0009 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] (Sp) ÷ [0.3] ÷ 01BB × 0308 × 0009 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 01BB × 0061 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 01BB × 0308 × 0061 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 01BB × 0041 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 01BB × 0308 × 0041 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 01BB × 01BB ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 01BB × 0308 × 01BB ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 01BB × 0030 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 01BB × 0308 × 0030 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 01BB × 002E ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 01BB × 0308 × 002E ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ 
[0.3] ÷ 01BB × 0021 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 01BB × 0308 × 0021 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 01BB × 0022 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 01BB × 0308 × 0022 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 01BB × 002C ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 01BB × 0308 × 002C ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 01BB × 00AD ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 01BB × 0308 × 00AD ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 01BB × 0300 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 01BB × 0308 × 0300 ÷ # ÷ [0.2] LATIN LETTER TWO WITH STROKE (OLetter) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0030 × 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (Other) ÷ [0.3] ÷ 0030 × 0308 × 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0030 × 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (CR) ÷ [0.3] ÷ 0030 × 0308 × 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0030 × 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (LF) ÷ [0.3] ÷ 0030 × 0308 × 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0030 × 0085 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (Sep) ÷ [0.3] ÷ 0030 × 0308 × 0085 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0030 × 0009 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] (Sp) ÷ [0.3] ÷ 0030 × 0308 × 0009 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0030 × 0061 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0030 × 0308 × 0061 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0030 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0030 × 0308 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0030 × 01BB ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0030 × 0308 × 01BB ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0030 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0030 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0030 × 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0030 × 0308 × 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0030 × 0021 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0030 × 0308 × 0021 ÷ # ÷ [0.2] DIGIT ZERO 
(Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0030 × 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0030 × 0308 × 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0030 × 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0030 × 0308 × 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0030 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0030 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0030 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0030 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002E ÷ 0001 ÷ # ÷ [0.2] FULL STOP (ATerm) ÷ [11.0] (Other) ÷ [0.3] ÷ 002E × 0308 ÷ 0001 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] (Other) ÷ [0.3] ÷ 002E × 000D ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (CR) ÷ [0.3] ÷ 002E × 0308 × 000D ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (CR) ÷ [0.3] ÷ 002E × 000A ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (LF) ÷ [0.3] ÷ 002E × 0308 × 000A ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (LF) ÷ [0.3] ÷ 002E × 0085 ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (Sep) ÷ [0.3] ÷ 002E × 0308 × 0085 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sep) ÷ [0.3] ÷ 002E × 0009 ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] (Sp) ÷ [0.3] ÷ 002E × 0308 × 0009 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sp) ÷ [0.3] ÷ 002E × 0061 ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 002E × 0308 × 0061 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 002E ÷ 0041 ÷ # ÷ [0.2] FULL STOP (ATerm) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 002E × 0308 ÷ 0041 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 002E ÷ 01BB ÷ # ÷ [0.2] FULL STOP (ATerm) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 002E × 0308 ÷ 01BB ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 002E × 0030 ÷ # ÷ [0.2] FULL STOP (ATerm) × [6.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002E × 0308 × 0030 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [6.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002E × 002E ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.1] FULL STOP (ATerm) ÷ [0.3] ÷ 002E × 0308 × 002E ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] FULL STOP (ATerm) ÷ [0.3] ÷ 002E × 0021 ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 002E × 0308 × 0021 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 002E × 0022 ÷ # ÷ [0.2] FULL STOP (ATerm) × [9.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 002E × 0308 × 0022 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 002E × 002C ÷ # ÷ [0.2] FULL STOP (ATerm) × [8.1] COMMA (SContinue) ÷ [0.3] ÷ 002E × 0308 × 002C ÷ # ÷ [0.2] FULL STOP 
(ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] COMMA (SContinue) ÷ [0.3] ÷ 002E × 00AD ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002E × 0308 × 00AD ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002E × 0300 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002E × 0308 × 0300 ÷ # ÷ [0.2] FULL STOP (ATerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0021 ÷ 0001 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] (Other) ÷ [0.3] ÷ 0021 × 0308 ÷ 0001 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] (Other) ÷ [0.3] ÷ 0021 × 000D ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (CR) ÷ [0.3] ÷ 0021 × 0308 × 000D ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (CR) ÷ [0.3] ÷ 0021 × 000A ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (LF) ÷ [0.3] ÷ 0021 × 0308 × 000A ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (LF) ÷ [0.3] ÷ 0021 × 0085 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (Sep) ÷ [0.3] ÷ 0021 × 0308 × 0085 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sep) ÷ [0.3] ÷ 0021 × 0009 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] (Sp) ÷ [0.3] ÷ 0021 × 0308 × 0009 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] (Sp) ÷ [0.3] ÷ 0021 ÷ 0061 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0021 × 0308 ÷ 0061 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0021 ÷ 0041 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0021 × 0308 ÷ 0041 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0021 ÷ 01BB ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0021 × 0308 ÷ 01BB ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0021 ÷ 0030 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) ÷ [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0021 × 0308 ÷ 0030 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0021 × 002E ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [8.1] FULL STOP (ATerm) ÷ [0.3] ÷ 0021 × 0308 × 002E ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] FULL STOP (ATerm) ÷ [0.3] ÷ 0021 × 0021 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0021 × 0308 × 0021 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0021 × 0022 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0021 × 0308 × 0022 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [9.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0021 × 002C ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [8.1] COMMA (SContinue) ÷ [0.3] ÷ 0021 × 0308 × 002C ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.1] COMMA (SContinue) ÷ [0.3] ÷ 0021 × 00AD ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0021 × 0308 × 00AD ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × 
[5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0021 × 0300 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0021 × 0308 × 0300 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0022 × 0001 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (Other) ÷ [0.3] ÷ 0022 × 0308 × 0001 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0022 × 000D ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (CR) ÷ [0.3] ÷ 0022 × 0308 × 000D ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0022 × 000A ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (LF) ÷ [0.3] ÷ 0022 × 0308 × 000A ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0022 × 0085 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (Sep) ÷ [0.3] ÷ 0022 × 0308 × 0085 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0022 × 0009 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] (Sp) ÷ [0.3] ÷ 0022 × 0308 × 0009 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0022 × 0061 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0022 × 0308 × 0061 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0022 × 0041 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0022 × 0308 × 0041 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0022 × 01BB ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0022 × 0308 × 01BB ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0022 × 0030 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0022 × 0308 × 0030 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0022 × 002E ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0022 × 0308 × 002E ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0022 × 0021 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0022 × 0308 × 0021 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0022 × 0022 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0022 × 0308 × 0022 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0022 × 002C ÷ # ÷ [0.2] QUOTATION MARK (Close) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0022 × 0308 × 002C ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0022 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0022 × 0308 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0022 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0022 × 0308 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE 
ACCENT (Extend_FE) ÷ [0.3] ÷ 002C × 0001 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (Other) ÷ [0.3] ÷ 002C × 0308 × 0001 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 002C × 000D ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (CR) ÷ [0.3] ÷ 002C × 0308 × 000D ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 002C × 000A ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (LF) ÷ [0.3] ÷ 002C × 0308 × 000A ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 002C × 0085 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (Sep) ÷ [0.3] ÷ 002C × 0308 × 0085 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 002C × 0009 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] (Sp) ÷ [0.3] ÷ 002C × 0308 × 0009 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 002C × 0061 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 002C × 0308 × 0061 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 002C × 0041 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 002C × 0308 × 0041 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 002C × 01BB ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 002C × 0308 × 01BB ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 002C × 0030 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002C × 0308 × 0030 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002C × 002E ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 002C × 0308 × 002E ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 002C × 0021 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 002C × 0308 × 0021 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 002C × 0022 ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 002C × 0308 × 0022 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 002C × 002C ÷ # ÷ [0.2] COMMA (SContinue) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 002C × 0308 × 002C ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 002C × 00AD ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002C × 0300 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] COMMA (SContinue) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 00AD × 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (Other) ÷ [0.3] ÷ 00AD × 0308 × 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 00AD × 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (CR) ÷ [0.3] ÷ 00AD × 0308 × 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ 
[0.3] ÷ 00AD × 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (LF) ÷ [0.3] ÷ 00AD × 0308 × 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 00AD × 0085 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (Sep) ÷ [0.3] ÷ 00AD × 0308 × 0085 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 00AD × 0009 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] (Sp) ÷ [0.3] ÷ 00AD × 0308 × 0009 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 00AD × 0061 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 00AD × 0308 × 0061 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 00AD × 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 00AD × 0308 × 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 00AD × 01BB ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 00AD × 0308 × 01BB ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 00AD × 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 00AD × 0308 × 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 00AD × 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 00AD × 0308 × 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 00AD × 0021 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 00AD × 0308 × 0021 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 00AD × 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 00AD × 0308 × 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 00AD × 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 00AD × 0308 × 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 00AD × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 00AD × 0308 × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 00AD × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 00AD × 0308 × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0300 × 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0300 × 0308 × 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Other) ÷ [0.3] ÷ 0300 × 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0300 × 0308 × 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (CR) ÷ [0.3] ÷ 0300 × 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0300 × 0308 × 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS 
(Extend_FE) × [998.0] (LF) ÷ [0.3] ÷ 0300 × 0085 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0300 × 0308 × 0085 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sep) ÷ [0.3] ÷ 0300 × 0009 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0300 × 0308 × 0009 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] (Sp) ÷ [0.3] ÷ 0300 × 0061 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0300 × 0308 × 0061 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN SMALL LETTER A (Lower) ÷ [0.3] ÷ 0300 × 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0300 × 0308 × 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER A (Upper) ÷ [0.3] ÷ 0300 × 01BB ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0300 × 0308 × 01BB ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN LETTER TWO WITH STROKE (OLetter) ÷ [0.3] ÷ 0300 × 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0300 × 0308 × 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0300 × 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0300 × 0308 × 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0300 × 0021 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0300 × 0308 × 0021 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] EXCLAMATION MARK (STerm) ÷ [0.3] ÷ 0300 × 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0300 × 0308 × 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] QUOTATION MARK (Close) ÷ [0.3] ÷ 0300 × 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0300 × 0308 × 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [998.0] COMMA (SContinue) ÷ [0.3] ÷ 0300 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0300 × 0308 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000D × 000A ÷ 0061 × 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [4.0] LATIN SMALL LETTER A (Lower) × [998.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] ÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (Lower) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] ÷ 0020 × 200D × 0646 ÷ # ÷ [0.2] SPACE (Sp) × [5.0] ZERO WIDTH JOINER (Extend_FE) × [998.0] ARABIC LETTER NOON (OLetter) ÷ [0.3] ÷ 0646 × 200D × 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (OLetter) × [5.0] ZERO WIDTH JOINER (Extend_FE) × [998.0] SPACE (Sp) ÷ 
[0.3] ÷ 0028 × 0022 × 0047 × 006F × 002E × 0022 × 0029 × 0020 ÷ 0028 × 0048 × 0065 × 0020 × 0064 × 0069 × 0064 × 002E × 0029 ÷ # ÷ [0.2] LEFT PARENTHESIS (Close) × [998.0] QUOTATION MARK (Close) × [998.0] LATIN CAPITAL LETTER G (Upper) × [998.0] LATIN SMALL LETTER O (Lower) × [998.0] FULL STOP (ATerm) × [9.0] QUOTATION MARK (Close) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] SPACE (Sp) ÷ [11.0] LEFT PARENTHESIS (Close) × [998.0] LATIN CAPITAL LETTER H (Upper) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] LATIN SMALL LETTER I (Lower) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) ÷ [0.3] ÷ 0028 × 201C × 0047 × 006F × 003F × 201D × 0029 × 0020 ÷ 0028 × 0048 × 0065 × 0020 × 0064 × 0069 × 0064 × 002E × 0029 ÷ # ÷ [0.2] LEFT PARENTHESIS (Close) × [998.0] LEFT DOUBLE QUOTATION MARK (Close) × [998.0] LATIN CAPITAL LETTER G (Upper) × [998.0] LATIN SMALL LETTER O (Lower) × [998.0] QUESTION MARK (STerm) × [9.0] RIGHT DOUBLE QUOTATION MARK (Close) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] SPACE (Sp) ÷ [11.0] LEFT PARENTHESIS (Close) × [998.0] LATIN CAPITAL LETTER H (Upper) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] LATIN SMALL LETTER I (Lower) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) ÷ [0.3] ÷ 0055 × 002E × 0053 × 002E × 0041 × 0300 × 002E × 0020 × 0069 × 0073 ÷ # ÷ [0.2] LATIN CAPITAL LETTER U (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER S (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) × [8.0] SPACE (Sp) × [8.0] LATIN SMALL LETTER I (Lower) × [998.0] LATIN SMALL LETTER S (Lower) ÷ [0.3] ÷ 0055 × 002E × 0053 × 002E × 0041 × 0300 × 003F × 0020 ÷ 0048 × 0065 ÷ # ÷ [0.2] LATIN CAPITAL LETTER U (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER S (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] QUESTION MARK (STerm) × [9.0] SPACE (Sp) ÷ [11.0] LATIN CAPITAL LETTER H (Upper) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0055 × 002E × 0053 × 002E × 0041 × 0300 × 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER U (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER S (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) ÷ [0.3] ÷ 0033 × 002E × 0034 ÷ # ÷ [0.2] DIGIT THREE (Numeric) × [998.0] FULL STOP (ATerm) × [6.0] DIGIT FOUR (Numeric) ÷ [0.3] ÷ 0063 × 002E × 0064 ÷ # ÷ [0.2] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] LATIN SMALL LETTER D (Lower) ÷ [0.3] ÷ 0043 × 002E × 0064 ÷ # ÷ [0.2] LATIN CAPITAL LETTER C (Upper) × [998.0] FULL STOP (ATerm) × [8.0] LATIN SMALL LETTER D (Lower) ÷ [0.3] ÷ 0063 × 002E × 0044 ÷ # ÷ [0.2] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER D (Upper) ÷ [0.3] ÷ 0043 × 002E × 0044 ÷ # ÷ [0.2] LATIN CAPITAL LETTER C (Upper) × [998.0] FULL STOP (ATerm) × [7.0] LATIN CAPITAL LETTER D (Upper) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 0074 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] RIGHT PARENTHESIS (Close) × [8.0] RIGHT SINGLE QUOTATION MARK 
(Close) × [8.0] NO-BREAK SPACE (Sp) × [8.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 ÷ 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [9.0] NO-BREAK SPACE (Sp) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 2018 × 0028 × 0074 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] RIGHT PARENTHESIS (Close) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [8.0] NO-BREAK SPACE (Sp) × [8.0] LEFT SINGLE QUOTATION MARK (Close) × [998.0] LEFT PARENTHESIS (Close) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 ÷ 2018 × 0028 × 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [9.0] NO-BREAK SPACE (Sp) ÷ [11.0] LEFT SINGLE QUOTATION MARK (Close) × [998.0] LEFT PARENTHESIS (Close) × [998.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 0308 × 0074 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.0] RIGHT PARENTHESIS (Close) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [8.0] NO-BREAK SPACE (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 00A0 × 0308 ÷ 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [9.0] NO-BREAK SPACE (Sp) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 2019 × 0308 ÷ 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 0029 × 000A ÷ 0308 × 0054 × 0068 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [9.0] RIGHT PARENTHESIS (Close) × [9.0] (LF) ÷ [4.0] COMBINING DIAERESIS (Extend_FE) × [998.0] LATIN CAPITAL LETTER T (Upper) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER 
E (Lower) ÷ [0.3] ÷ 0074 × 0068 × 0065 × 0020 × 0072 × 0065 × 0073 × 0070 × 002E × 0020 × 006C × 0065 × 0061 × 0064 × 0065 × 0072 × 0073 × 0020 × 0061 × 0072 × 0065 ÷ # ÷ [0.2] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER H (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER R (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER S (Lower) × [998.0] LATIN SMALL LETTER P (Lower) × [998.0] FULL STOP (ATerm) × [8.0] SPACE (Sp) × [8.0] LATIN SMALL LETTER L (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER A (Lower) × [998.0] LATIN SMALL LETTER D (Lower) × [998.0] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER R (Lower) × [998.0] LATIN SMALL LETTER S (Lower) × [998.0] SPACE (Sp) × [998.0] LATIN SMALL LETTER A (Lower) × [998.0] LATIN SMALL LETTER R (Lower) × [998.0] LATIN SMALL LETTER E (Lower) ÷ [0.3] ÷ 5B57 × 002E ÷ 5B57 ÷ # ÷ [0.2] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [998.0] FULL STOP (ATerm) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E ÷ 5B83 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) ÷ [0.3] ÷ 0065 × 0074 × 0063 × 002E × 3002 ÷ # ÷ [0.2] LATIN SMALL LETTER E (Lower) × [998.0] LATIN SMALL LETTER T (Lower) × [998.0] LATIN SMALL LETTER C (Lower) × [998.0] FULL STOP (ATerm) × [8.1] IDEOGRAPHIC FULL STOP (STerm) ÷ [0.3] ÷ 5B57 × 3002 ÷ 5B83 ÷ # ÷ [0.2] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [998.0] IDEOGRAPHIC FULL STOP (STerm) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) ÷ [0.3] ÷ 0021 × 0020 × 0020 ÷ # ÷ [0.2] EXCLAMATION MARK (STerm) × [9.0] SPACE (Sp) × [10.0] SPACE (Sp) ÷ [0.3] ÷ 2060 × 0028 × 2060 × 0022 × 2060 × 0047 × 2060 × 006F × 2060 × 002E × 2060 × 0022 × 2060 × 0029 × 2060 × 0020 × 2060 ÷ 0028 × 2060 × 0048 × 2060 × 0065 × 2060 × 0020 × 2060 × 0064 × 2060 × 0069 × 2060 × 0064 × 2060 × 002E × 2060 × 0029 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER G (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER O (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER H (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER I (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0028 × 2060 × 201C × 2060 × 0047 × 2060 × 006F × 2060 × 003F × 2060 × 201D × 2060 × 0029 × 2060 × 0020 × 2060 ÷ 0028 × 2060 × 0048 × 2060 × 0065 × 2060 × 0020 × 2060 × 0064 × 2060 × 0069 × 2060 × 0064 × 2060 × 002E × 2060 × 0029 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] 
LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LEFT DOUBLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER G (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER O (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] QUESTION MARK (STerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT DOUBLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER H (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER I (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0055 × 2060 × 002E × 2060 × 0053 × 2060 × 002E × 2060 × 0041 × 2060 × 0300 × 002E × 2060 × 0020 × 2060 × 0069 × 2060 × 0073 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER U (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER S (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER I (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER S (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0055 × 2060 × 002E × 2060 × 0053 × 2060 × 002E × 2060 × 0041 × 2060 × 0300 × 003F × 2060 × 0020 × 2060 ÷ 0048 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER U (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER S (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] QUESTION MARK (STerm) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LATIN CAPITAL LETTER H (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0055 × 2060 × 002E × 2060 × 0053 × 2060 × 002E × 2060 × 0041 × 2060 × 0300 × 002E × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER U (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER S (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING GRAVE ACCENT (Extend_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0033 × 2060 × 002E × 2060 × 0034 × 
2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] DIGIT THREE (Numeric) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [6.0] DIGIT FOUR (Numeric) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0063 × 2060 × 002E × 2060 × 0064 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0043 × 2060 × 002E × 2060 × 0064 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER C (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0063 × 2060 × 002E × 2060 × 0044 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER D (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0043 × 2060 × 002E × 2060 × 0044 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER C (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER D (Upper) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 ÷ 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 
2018 × 2060 × 0028 × 2060 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LEFT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 ÷ 2018 × 2060 × 0028 × 2060 × 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] LEFT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LEFT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 0308 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [8.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [8.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 00A0 × 2060 × 0308 ÷ 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD 
JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] NO-BREAK SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 2019 × 2060 × 0308 ÷ 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT SINGLE QUOTATION MARK (Close) × [5.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) ÷ [11.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 0029 × 2060 × 000A ÷ 2060 × 0308 × 2060 × 0054 × 2060 × 0068 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [9.0] RIGHT PARENTHESIS (Close) × [5.0] WORD JOINER (Format_FE) × [9.0] (LF) ÷ [4.0] WORD JOINER (Format_FE) × [5.0] COMBINING DIAERESIS (Extend_FE) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN CAPITAL LETTER T (Upper) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0074 × 2060 × 0068 × 2060 × 0065 × 2060 × 0020 × 2060 × 0072 × 2060 × 0065 × 2060 × 0073 × 2060 × 0070 × 2060 × 002E × 2060 × 0020 × 2060 × 006C × 2060 × 0065 × 2060 × 0061 × 2060 × 0064 × 2060 × 0065 × 2060 × 0072 × 2060 × 0073 × 2060 × 0020 × 2060 × 0061 × 2060 × 0072 × 2060 × 0065 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER H (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER R (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER S (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER P (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [8.0] LATIN SMALL LETTER L (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER A (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER D (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL 
LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER R (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER S (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER A (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER R (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 5B57 × 2060 × 002E × 2060 ÷ 5B57 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 ÷ 5B83 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0065 × 2060 × 0074 × 2060 × 0063 × 2060 × 002E × 2060 × 3002 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER E (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER T (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] LATIN SMALL LETTER C (Lower) × [5.0] WORD JOINER (Format_FE) × [998.0] FULL STOP (ATerm) × [5.0] WORD JOINER (Format_FE) × [8.1] IDEOGRAPHIC FULL STOP (STerm) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 5B57 × 2060 × 3002 × 2060 ÷ 5B83 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] CJK UNIFIED IDEOGRAPH-5B57 (OLetter) × [5.0] WORD JOINER (Format_FE) × [998.0] IDEOGRAPHIC FULL STOP (STerm) × [5.0] WORD JOINER (Format_FE) ÷ [11.0] CJK UNIFIED IDEOGRAPH-5B83 (OLetter) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 2060 × 0021 × 2060 × 0020 × 2060 × 0020 × 2060 × 2060 ÷ # ÷ [0.2] WORD JOINER (Format_FE) × [998.0] EXCLAMATION MARK (STerm) × [5.0] WORD JOINER (Format_FE) × [9.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [10.0] SPACE (Sp) × [5.0] WORD JOINER (Format_FE) × [5.0] WORD JOINER (Format_FE) ÷ [0.3] # # Lines: 502 # # EOF bstr-0.2.17/src/unicode/data/WordBreakTest.txt000064400000000000000000011050740072674642500173340ustar 00000000000000# WordBreakTest-12.1.0.txt # Date: 2019-03-10, 10:53:29 GMT # © 2019 Unicode®, Inc. # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. # For terms of use, see http://www.unicode.org/terms_of_use.html # # Unicode Character Database # For documentation, see http://www.unicode.org/reports/tr44/ # # Default Word_Break Test # # Format: # (# )? # contains hex Unicode code points, with # ÷ wherever there is a break opportunity, and # × wherever there is not. # the format can change, but currently it shows: # - the sample character name # - (x) the Word_Break property value for the sample character # - [x] the rule that determines whether there is a break or not, # as listed in the Rules section of WordBreakTest.html # # These samples may be extended or changed in the future. 
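(Editorial aside, not part of the data file above.) The header of WordBreakTest.txt describes the notation used throughout these UCD test files: each line is a sequence of hex code points separated by ÷ where a break opportunity exists and × where it does not, followed by an explanatory comment after "#". As a minimal sketch of how such a line could be parsed, assuming a hypothetical helper named parse_test_line (this is illustrative Rust, not the parser bstr itself uses for its test suite):

// Illustrative sketch only: turn one line of a UCD break-test file into the
// sample characters and the break decisions surrounding them.
fn parse_test_line(line: &str) -> Option<(Vec<char>, Vec<bool>)> {
    // Everything after '#' is a human-readable comment; ignore it.
    let data = line.split('#').next()?.trim();
    if data.is_empty() {
        return None; // blank or comment-only line
    }
    let mut chars = Vec::new();  // the sample code points, decoded
    let mut breaks = Vec::new(); // true for "÷" (break), false for "×" (no break)
    for token in data.split_whitespace() {
        match token {
            "÷" => breaks.push(true),
            "×" => breaks.push(false),
            hex => {
                let cp = u32::from_str_radix(hex, 16).ok()?;
                chars.push(char::from_u32(cp)?);
            }
        }
    }
    Some((chars, breaks))
}

fn main() {
    // "÷ 0001 × 0308 ÷ 0001 ÷" means: break before U+0001, no break before
    // U+0308, break before the second U+0001, and break at end of text.
    let (chars, breaks) = parse_test_line("÷ 0001 × 0308 ÷ 0001 ÷ # example").unwrap();
    assert_eq!(chars.len(), 3);
    assert_eq!(breaks, vec![true, false, true, true]);
}

The breaks vector always has one more entry than the gaps between characters because the test lines also mark the boundaries at the start and end of the sample text. The data lines that follow resume the file verbatim.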
# ÷ 0001 ÷ 0001 ÷ # ÷ [0.2] (Other) ÷ [999.0] (Other) ÷ [0.3] ÷ 0001 × 0308 ÷ 0001 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0001 ÷ 000D ÷ # ÷ [0.2] (Other) ÷ [3.2] (CR) ÷ [0.3] ÷ 0001 × 0308 ÷ 000D ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0001 ÷ 000A ÷ # ÷ [0.2] (Other) ÷ [3.2] (LF) ÷ [0.3] ÷ 0001 × 0308 ÷ 000A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0001 ÷ 000B ÷ # ÷ [0.2] (Other) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0001 × 0308 ÷ 000B ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0001 ÷ 3031 ÷ # ÷ [0.2] (Other) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0001 × 0308 ÷ 3031 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0001 ÷ 0041 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0001 × 0308 ÷ 0041 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0001 ÷ 003A ÷ # ÷ [0.2] (Other) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0001 × 0308 ÷ 003A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0001 ÷ 002C ÷ # ÷ [0.2] (Other) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0001 × 0308 ÷ 002C ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0001 ÷ 002E ÷ # ÷ [0.2] (Other) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0001 × 0308 ÷ 002E ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0001 ÷ 0030 ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0001 × 0308 ÷ 0030 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0001 ÷ 005F ÷ # ÷ [0.2] (Other) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0001 × 0308 ÷ 005F ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0001 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0001 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0001 ÷ 05D0 ÷ # ÷ [0.2] (Other) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0001 × 0308 ÷ 05D0 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0001 ÷ 0022 ÷ # ÷ [0.2] (Other) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0001 × 0308 ÷ 0022 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0001 ÷ 0027 ÷ # ÷ [0.2] (Other) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0001 × 0308 ÷ 0027 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0001 ÷ 231A ÷ # ÷ [0.2] (Other) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0001 × 0308 ÷ 231A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0001 ÷ 0020 ÷ # ÷ [0.2] (Other) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0001 × 0308 ÷ 0020 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0001 × 00AD ÷ # ÷ [0.2] (Other) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0001 × 0308 × 00AD ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0001 × 0300 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING GRAVE ACCENT 
(Extend_FE) ÷ [0.3] ÷ 0001 × 0308 × 0300 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0001 × 200D ÷ # ÷ [0.2] (Other) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0001 × 0308 × 200D ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0001 ÷ 0061 × 2060 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0001 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0001 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0001 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0001 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0001 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0001 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0001 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0001 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Other) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0001 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0001 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0001 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0001 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0001 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0001 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0001 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0001 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Other) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0001 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Other) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [3.1] (Other) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 000D ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [3.1] (CR) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 000D × 000A ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 000D ÷ 000B ÷ # ÷ [0.2] (CR) ÷ [3.1] (Newline) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 
000B ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 000D ÷ 3031 ÷ # ÷ [0.2] (CR) ÷ [3.1] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 3031 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 000D ÷ 0041 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0041 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 000D ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COLON (MidLetter) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000D ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMMA (MidNum) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000D ÷ 002E ÷ # ÷ [0.2] (CR) ÷ [3.1] FULL STOP (MidNumLet) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 002E ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 000D ÷ 0030 ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0030 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000D ÷ 005F ÷ # ÷ [0.2] (CR) ÷ [3.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 005F ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 000D ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [3.1] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000D ÷ 05D0 ÷ # ÷ [0.2] (CR) ÷ [3.1] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 05D0 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 000D ÷ 0022 ÷ # ÷ [0.2] (CR) ÷ [3.1] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0022 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 000D ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000D ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [3.1] WATCH (ExtPict) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 000D ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [3.1] SPACE (WSegSpace) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 000D ÷ 00AD ÷ # ÷ [0.2] (CR) ÷ [3.1] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000D ÷ 0308 × 00AD ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000D ÷ 0300 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000D ÷ 0308 × 0300 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000D ÷ 200D ÷ # ÷ [0.2] (CR) ÷ [3.1] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 000D ÷ 0308 × 200D ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 000D ÷ 0061 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL 
LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000D ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000D ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000D ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000D ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000D ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000D ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D ÷ 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (CR) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000A ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [3.1] (Other) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 000A ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [3.1] (CR) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 000A ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [3.1] (LF) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 000A ÷ 000B ÷ # ÷ [0.2] (LF) ÷ [3.1] (Newline) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 000B ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 000A ÷ 3031 ÷ # ÷ [0.2] (LF) ÷ [3.1] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 3031 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 000A ÷ 0041 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0041 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 000A ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COLON (MidLetter) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000A ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMMA (MidNum) ÷ [0.3] ÷ 000A ÷ 
0308 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000A ÷ 002E ÷ # ÷ [0.2] (LF) ÷ [3.1] FULL STOP (MidNumLet) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 002E ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 000A ÷ 0030 ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0030 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000A ÷ 005F ÷ # ÷ [0.2] (LF) ÷ [3.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 005F ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 000A ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [3.1] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000A ÷ 05D0 ÷ # ÷ [0.2] (LF) ÷ [3.1] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 05D0 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 000A ÷ 0022 ÷ # ÷ [0.2] (LF) ÷ [3.1] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0022 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 000A ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000A ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [3.1] WATCH (ExtPict) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 000A ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [3.1] SPACE (WSegSpace) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 000A ÷ 00AD ÷ # ÷ [0.2] (LF) ÷ [3.1] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000A ÷ 0308 × 00AD ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000A ÷ 0300 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000A ÷ 0308 × 0300 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000A ÷ 200D ÷ # ÷ [0.2] (LF) ÷ [3.1] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 000A ÷ 0308 × 200D ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 000A ÷ 0061 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ 
[999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000A ÷ 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000B ÷ 0001 ÷ # ÷ [0.2] (Newline) ÷ [3.1] (Other) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0001 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 000B ÷ 000D ÷ # ÷ [0.2] (Newline) ÷ [3.1] (CR) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 000D ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 000B ÷ 000A ÷ # ÷ [0.2] (Newline) ÷ [3.1] (LF) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 000A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 000B ÷ 000B ÷ # ÷ [0.2] (Newline) ÷ [3.1] (Newline) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 000B ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 000B ÷ 3031 ÷ # ÷ [0.2] (Newline) ÷ [3.1] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 3031 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 000B ÷ 0041 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0041 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 000B ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COLON (MidLetter) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000B ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMMA (MidNum) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000B ÷ 002E ÷ # ÷ [0.2] (Newline) ÷ [3.1] FULL STOP (MidNumLet) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 002E ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 000B ÷ 0030 ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0030 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 000B ÷ 005F ÷ # ÷ [0.2] (Newline) ÷ [3.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 005F ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE 
(ExtendNumLet) ÷ [0.3] ÷ 000B ÷ 1F1E6 ÷ # ÷ [0.2] (Newline) ÷ [3.1] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 1F1E6 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 000B ÷ 05D0 ÷ # ÷ [0.2] (Newline) ÷ [3.1] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 05D0 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 000B ÷ 0022 ÷ # ÷ [0.2] (Newline) ÷ [3.1] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0022 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 000B ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000B ÷ 231A ÷ # ÷ [0.2] (Newline) ÷ [3.1] WATCH (ExtPict) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 231A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 000B ÷ 0020 ÷ # ÷ [0.2] (Newline) ÷ [3.1] SPACE (WSegSpace) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0020 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 000B ÷ 00AD ÷ # ÷ [0.2] (Newline) ÷ [3.1] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000B ÷ 0308 × 00AD ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 000B ÷ 0300 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000B ÷ 0308 × 0300 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 000B ÷ 200D ÷ # ÷ [0.2] (Newline) ÷ [3.1] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 000B ÷ 0308 × 200D ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 000B ÷ 0061 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000B ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000B ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000B ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000B ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000B ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000B ÷ 
0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 000B ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 000B ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 000B ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000B ÷ 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] (Newline) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 3031 ÷ 0001 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] (Other) ÷ [0.3] ÷ 3031 × 0308 ÷ 0001 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 3031 ÷ 000D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [3.2] (CR) ÷ [0.3] ÷ 3031 × 0308 ÷ 000D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 3031 ÷ 000A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [3.2] (LF) ÷ [0.3] ÷ 3031 × 0308 ÷ 000A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 3031 ÷ 000B ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [3.2] (Newline) ÷ [0.3] ÷ 3031 × 0308 ÷ 000B ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 3031 × 3031 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 3031 × 0308 × 3031 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 3031 ÷ 0041 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 3031 × 0308 ÷ 0041 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 3031 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 3031 × 0308 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 3031 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 3031 × 0308 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 3031 ÷ 002E ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 3031 × 0308 ÷ 002E ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 3031 ÷ 0030 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 3031 × 0308 ÷ 0030 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 3031 × 005F ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.1] LOW LINE (ExtendNumLet) ÷ 
[0.3] ÷ 3031 × 0308 × 005F ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 3031 ÷ 1F1E6 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 3031 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 3031 ÷ 05D0 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 3031 × 0308 ÷ 05D0 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 3031 ÷ 0022 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 3031 × 0308 ÷ 0022 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 3031 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 3031 × 0308 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 3031 ÷ 231A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 3031 × 0308 ÷ 231A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 3031 ÷ 0020 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 3031 × 0308 ÷ 0020 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 3031 × 00AD ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 3031 × 0308 × 00AD ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 3031 × 0300 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 3031 × 0308 × 0300 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 3031 × 200D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 3031 × 0308 × 200D ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 3031 ÷ 0061 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 3031 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 3031 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 3031 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 3031 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 3031 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE 
(Single_Quote) ÷ [0.3] ÷ 3031 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 3031 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 3031 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 3031 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 3031 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 3031 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 3031 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 3031 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 3031 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 3031 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 3031 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 3031 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0041 ÷ 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] (Other) ÷ [0.3] ÷ 0041 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0041 ÷ 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [3.2] (CR) ÷ [0.3] ÷ 0041 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0041 ÷ 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [3.2] (LF) ÷ [0.3] ÷ 0041 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0041 ÷ 000B ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0041 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0041 ÷ 3031 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0041 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0041 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0041 × 0308 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 
0041 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0041 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0041 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0041 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0041 ÷ 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0041 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0041 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0041 × 0308 × 0030 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0041 × 005F ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0041 × 0308 × 005F ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0041 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0041 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0041 × 05D0 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0041 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0041 ÷ 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0041 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0041 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0041 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0041 ÷ 231A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0041 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0041 ÷ 0020 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0041 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0041 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0041 × 0308 × 00AD ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0041 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0041 × 0308 × 0300 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0041 × 200D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0041 × 0308 × 200D ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0041 × 0061 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A 
(ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0041 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0041 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0041 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0041 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0041 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0041 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0041 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0041 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0041 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0041 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0041 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0041 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0041 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0041 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0041 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0041 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0041 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 003A ÷ 0001 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] (Other) ÷ [0.3] ÷ 003A × 0308 ÷ 0001 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 003A ÷ 000D ÷ # ÷ [0.2] COLON (MidLetter) ÷ [3.2] (CR) ÷ [0.3] ÷ 003A × 0308 ÷ 000D ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 003A ÷ 000A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [3.2] (LF) ÷ [0.3] ÷ 003A × 0308 ÷ 000A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] 
(LF) ÷ [0.3] ÷ 003A ÷ 000B ÷ # ÷ [0.2] COLON (MidLetter) ÷ [3.2] (Newline) ÷ [0.3] ÷ 003A × 0308 ÷ 000B ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 003A ÷ 3031 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 003A × 0308 ÷ 3031 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 003A ÷ 0041 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 003A × 0308 ÷ 0041 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 003A ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 003A × 0308 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 003A ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 003A × 0308 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 003A ÷ 002E ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 003A × 0308 ÷ 002E ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 003A ÷ 0030 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 003A × 0308 ÷ 0030 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 003A ÷ 005F ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 003A × 0308 ÷ 005F ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 003A ÷ 1F1E6 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 003A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 003A ÷ 05D0 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 003A × 0308 ÷ 05D0 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 003A ÷ 0022 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 003A × 0308 ÷ 0022 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 003A ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 003A × 0308 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 003A ÷ 231A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 003A × 0308 ÷ 231A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 003A ÷ 0020 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 003A × 0308 ÷ 0020 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 003A × 00AD ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 003A × 0308 × 00AD ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 003A × 0300 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 003A × 0308 × 0300 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 003A 
× 200D ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 003A × 0308 × 200D ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 003A ÷ 0061 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 003A × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 003A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 003A × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 003A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 003A × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 003A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 003A × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 003A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 003A × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 003A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 003A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 003A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 003A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 003A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 003A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 003A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 003A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002C ÷ 0001 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] (Other) ÷ [0.3] ÷ 002C × 0308 ÷ 0001 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 002C ÷ 000D ÷ # ÷ [0.2] COMMA (MidNum) ÷ [3.2] (CR) ÷ [0.3] ÷ 002C × 0308 ÷ 000D ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 002C ÷ 000A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [3.2] (LF) ÷ [0.3] ÷ 002C × 0308 ÷ 000A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS 
(Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 002C ÷ 000B ÷ # ÷ [0.2] COMMA (MidNum) ÷ [3.2] (Newline) ÷ [0.3] ÷ 002C × 0308 ÷ 000B ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 002C ÷ 3031 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 002C × 0308 ÷ 3031 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 002C ÷ 0041 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 002C × 0308 ÷ 0041 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 002C ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002C × 0308 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002C ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002C × 0308 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002C ÷ 002E ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 002C × 0308 ÷ 002E ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 002C ÷ 0030 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002C × 0308 ÷ 0030 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002C ÷ 005F ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 002C × 0308 ÷ 005F ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 002C ÷ 1F1E6 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 002C × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 002C ÷ 05D0 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 002C × 0308 ÷ 05D0 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 002C ÷ 0022 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 002C × 0308 ÷ 0022 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 002C ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002C × 0308 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002C ÷ 231A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 002C × 0308 ÷ 231A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 002C ÷ 0020 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 002C × 0308 ÷ 0020 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 002C × 00AD ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002C × 0300 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002C × 200D ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] 
÷ 002C × 0308 × 200D ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 002C ÷ 0061 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002C × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002C ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002C × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002C ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002C × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002C ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002C × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002C ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002C × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002C ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002C × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002C ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002C × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002C ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002C × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002C ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002C × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002E ÷ 0001 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] (Other) ÷ [0.3] ÷ 002E × 0308 ÷ 0001 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 002E ÷ 000D ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [3.2] (CR) ÷ [0.3] ÷ 002E × 0308 ÷ 000D ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 002E ÷ 000A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [3.2] (LF) ÷ [0.3] ÷ 002E × 0308 ÷ 000A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 002E ÷ 000B ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [3.2] (Newline) 
÷ [0.3] ÷ 002E × 0308 ÷ 000B ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 002E ÷ 3031 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 002E × 0308 ÷ 3031 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 002E ÷ 0041 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 002E × 0308 ÷ 0041 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 002E ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002E × 0308 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002E ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002E × 0308 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002E ÷ 002E ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 002E × 0308 ÷ 002E ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 002E ÷ 0030 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002E × 0308 ÷ 0030 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 002E ÷ 005F ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 002E × 0308 ÷ 005F ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 002E ÷ 1F1E6 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 002E × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 002E ÷ 05D0 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 002E × 0308 ÷ 05D0 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 002E ÷ 0022 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 002E × 0308 ÷ 0022 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 002E ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002E × 0308 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002E ÷ 231A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 002E × 0308 ÷ 231A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 002E ÷ 0020 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 002E × 0308 ÷ 0020 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 002E × 00AD ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002E × 0308 × 00AD ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 002E × 0300 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002E × 0308 × 0300 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] 
COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 002E × 200D ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 002E × 0308 × 200D ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 002E ÷ 0061 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002E × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002E ÷ 0061 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002E × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002E ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002E × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002E ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002E × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002E ÷ 0061 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002E × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002E ÷ 0031 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002E × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 002E ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002E × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 002E ÷ 0031 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002E × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 002E ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 002E × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] FULL STOP (MidNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0030 ÷ 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] (Other) ÷ [0.3] ÷ 0030 × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0030 ÷ 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [3.2] (CR) ÷ [0.3] ÷ 0030 × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] 
(CR) ÷ [0.3] ÷ 0030 ÷ 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [3.2] (LF) ÷ [0.3] ÷ 0030 × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0030 ÷ 000B ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0030 × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0030 ÷ 3031 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0030 × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0030 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0030 × 0308 × 0041 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0030 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0030 × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0030 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0030 × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0030 ÷ 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0030 × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0030 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0030 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0030 × 005F ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0030 × 0308 × 005F ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0030 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0030 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0030 × 05D0 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0030 × 0308 × 05D0 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0030 ÷ 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0030 × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0030 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0030 × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0030 ÷ 231A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0030 × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0030 ÷ 0020 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0030 × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0030 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0030 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN 
(Format_FE) ÷ [0.3] ÷ 0030 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0030 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0030 × 200D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0030 × 0308 × 200D ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0030 × 0061 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0030 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0030 × 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0030 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0030 × 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0030 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0030 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0030 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0030 × 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0030 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [10.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0030 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0030 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0030 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0030 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0030 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0030 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0030 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0030 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [4.0] COMBINING DIAERESIS (Extend_FE) × [8.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 005F ÷ 0001 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] (Other) ÷ [0.3] ÷ 005F × 0308 ÷ 0001 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] 
(Other) ÷ [0.3] ÷ 005F ÷ 000D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [3.2] (CR) ÷ [0.3] ÷ 005F × 0308 ÷ 000D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 005F ÷ 000A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [3.2] (LF) ÷ [0.3] ÷ 005F × 0308 ÷ 000A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 005F ÷ 000B ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [3.2] (Newline) ÷ [0.3] ÷ 005F × 0308 ÷ 000B ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 005F × 3031 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 005F × 0308 × 3031 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 005F × 0041 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 005F × 0308 × 0041 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 005F ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 005F × 0308 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 005F ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 005F × 0308 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 005F ÷ 002E ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 005F × 0308 ÷ 002E ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 005F × 0030 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 005F × 0308 × 0030 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 005F × 005F ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 005F × 0308 × 005F ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 005F ÷ 1F1E6 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 005F × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 005F × 05D0 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 005F × 0308 × 05D0 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 005F ÷ 0022 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 005F × 0308 ÷ 0022 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 005F ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 005F × 0308 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 005F ÷ 231A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 005F × 0308 ÷ 231A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 005F ÷ 0020 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 005F × 0308 ÷ 0020 ÷ # ÷ [0.2] LOW LINE 
(ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 005F × 00AD ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 005F × 0308 × 00AD ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 005F × 0300 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 005F × 0308 × 0300 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 005F × 200D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 005F × 0308 × 200D ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 005F × 0061 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 005F × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 005F × 0061 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 005F × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 005F × 0061 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 005F × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 005F × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 005F × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 005F × 0061 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 005F × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 005F × 0031 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 005F × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 005F × 0031 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 005F × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 005F × 0031 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 005F × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 005F × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 005F × 0308 × 0031 ÷ 002E × 
2060 ÷ # ÷ [0.2] LOW LINE (ExtendNumLet) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 1F1E6 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] (Other) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0001 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 1F1E6 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [3.2] (CR) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 000D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 1F1E6 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [3.2] (LF) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 000A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 1F1E6 ÷ 000B ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [3.2] (Newline) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 000B ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 1F1E6 ÷ 3031 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 3031 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 1F1E6 ÷ 0041 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0041 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 1F1E6 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 1F1E6 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 1F1E6 ÷ 002E ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 002E ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 1F1E6 ÷ 0030 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0030 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 1F1E6 ÷ 005F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 005F ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 1F1E6 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [15.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1F1E6 × 0308 × 1F1E6 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [15.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 1F1E6 ÷ 05D0 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 05D0 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 1F1E6 ÷ 0022 ÷ 
# ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0022 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 1F1E6 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 1F1E6 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 231A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 1F1E6 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0020 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 1F1E6 × 00AD ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 1F1E6 × 0308 × 00AD ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 1F1E6 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 1F1E6 × 0308 × 0300 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 1F1E6 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 1F1E6 × 0308 × 200D ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 1F1E6 ÷ 0061 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 1F1E6 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 1F1E6 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 1F1E6 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 1F1E6 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] REGIONAL 
INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 1F1E6 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 1F1E6 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 1F1E6 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 1F1E6 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 1F1E6 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 05D0 ÷ 0001 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] (Other) ÷ [0.3] ÷ 05D0 × 0308 ÷ 0001 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 05D0 ÷ 000D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [3.2] (CR) ÷ [0.3] ÷ 05D0 × 0308 ÷ 000D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 05D0 ÷ 000A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [3.2] (LF) ÷ [0.3] ÷ 05D0 × 0308 ÷ 000A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 05D0 ÷ 000B ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [3.2] (Newline) ÷ [0.3] ÷ 05D0 × 0308 ÷ 000B ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 05D0 ÷ 3031 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 05D0 × 0308 ÷ 3031 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 05D0 × 0041 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 05D0 × 0308 × 0041 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 05D0 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 05D0 × 0308 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 05D0 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 05D0 × 0308 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 05D0 ÷ 002E ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 05D0 × 0308 ÷ 
002E ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 05D0 × 0030 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 05D0 × 0308 × 0030 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 05D0 × 005F ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 05D0 × 0308 × 005F ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 05D0 ÷ 1F1E6 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 05D0 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 05D0 × 05D0 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 05D0 × 0308 × 05D0 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 05D0 ÷ 0022 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 05D0 × 0308 ÷ 0022 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 05D0 × 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [7.1] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 × 0308 × 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.1] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 ÷ 231A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 05D0 × 0308 ÷ 231A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 05D0 ÷ 0020 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 05D0 × 0308 ÷ 0020 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 05D0 × 00AD ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 05D0 × 0308 × 00AD ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 05D0 × 0300 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 05D0 × 0308 × 0300 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 05D0 × 200D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 05D0 × 0308 × 200D ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 05D0 × 0061 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 05D0 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 05D0 × 0061 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 05D0 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) 
× [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 05D0 × 0061 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 05D0 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 05D0 × 0061 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 05D0 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 05D0 × 0031 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 05D0 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 05D0 × 0031 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 × 0031 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 05D0 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 05D0 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 05D0 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0022 ÷ 0001 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] (Other) ÷ [0.3] ÷ 0022 × 0308 ÷ 0001 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0022 ÷ 000D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [3.2] (CR) ÷ [0.3] ÷ 0022 × 0308 ÷ 000D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0022 ÷ 000A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [3.2] (LF) ÷ [0.3] ÷ 0022 × 0308 ÷ 000A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0022 ÷ 000B ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0022 × 0308 ÷ 000B ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0022 ÷ 3031 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0022 × 0308 ÷ 3031 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS 
(Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0022 ÷ 0041 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0022 × 0308 ÷ 0041 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0022 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0022 × 0308 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0022 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0022 × 0308 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0022 ÷ 002E ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0022 × 0308 ÷ 002E ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0022 ÷ 0030 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0022 × 0308 ÷ 0030 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0022 ÷ 005F ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0022 × 0308 ÷ 005F ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0022 ÷ 1F1E6 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0022 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0022 ÷ 05D0 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0022 × 0308 ÷ 05D0 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0022 ÷ 0022 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0022 × 0308 ÷ 0022 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0022 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0022 × 0308 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0022 ÷ 231A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0022 × 0308 ÷ 231A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0022 ÷ 0020 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0022 × 0308 ÷ 0020 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0022 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0022 × 0308 × 00AD ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0022 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0022 × 0308 × 0300 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0022 × 200D ÷ # ÷ [0.2] QUOTATION MARK 
(Double_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0022 × 0308 × 200D ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0022 ÷ 0061 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0022 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0022 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0022 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0022 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0022 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0022 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0022 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0022 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0022 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0022 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0022 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0022 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0022 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0022 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0022 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0022 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0022 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] QUOTATION MARK (Double_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0027 ÷ 0001 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] (Other) ÷ [0.3] ÷ 0027 × 0308 ÷ 0001 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0027 ÷ 000D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [3.2] (CR) ÷ [0.3] ÷ 0027 × 
0308 ÷ 000D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0027 ÷ 000A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [3.2] (LF) ÷ [0.3] ÷ 0027 × 0308 ÷ 000A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0027 ÷ 000B ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0027 × 0308 ÷ 000B ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0027 ÷ 3031 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0027 × 0308 ÷ 3031 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0027 ÷ 0041 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0027 × 0308 ÷ 0041 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0027 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0027 × 0308 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0027 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0027 × 0308 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0027 ÷ 002E ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0027 × 0308 ÷ 002E ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0027 ÷ 0030 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0027 × 0308 ÷ 0030 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0027 ÷ 005F ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0027 × 0308 ÷ 005F ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0027 ÷ 1F1E6 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0027 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0027 ÷ 05D0 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0027 × 0308 ÷ 05D0 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0027 ÷ 0022 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0027 × 0308 ÷ 0022 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0027 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0027 × 0308 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0027 ÷ 231A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0027 × 0308 ÷ 231A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0027 ÷ 0020 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0027 × 0308 ÷ 0020 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING 
DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0027 × 00AD ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0027 × 0308 × 00AD ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0027 × 0300 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0027 × 0308 × 0300 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0027 × 200D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0027 × 0308 × 200D ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0027 ÷ 0061 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0027 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0027 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0027 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0027 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0027 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0027 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0027 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0027 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0027 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0027 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0027 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0027 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0027 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0027 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0027 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0027 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] 
÷ 0027 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 231A ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] (Other) ÷ [0.3] ÷ 231A × 0308 ÷ 0001 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 231A ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [3.2] (CR) ÷ [0.3] ÷ 231A × 0308 ÷ 000D ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 231A ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [3.2] (LF) ÷ [0.3] ÷ 231A × 0308 ÷ 000A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 231A ÷ 000B ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [3.2] (Newline) ÷ [0.3] ÷ 231A × 0308 ÷ 000B ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 231A ÷ 3031 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 231A × 0308 ÷ 3031 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 231A ÷ 0041 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 231A × 0308 ÷ 0041 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 231A ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 231A × 0308 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 231A ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 231A × 0308 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 231A ÷ 002E ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 231A × 0308 ÷ 002E ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 231A ÷ 0030 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 231A × 0308 ÷ 0030 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 231A ÷ 005F ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 231A × 0308 ÷ 005F ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 231A ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 231A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 231A ÷ 05D0 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 231A × 0308 ÷ 05D0 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 231A ÷ 0022 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 231A × 0308 ÷ 0022 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 231A ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 231A × 0308 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 231A ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 231A × 0308 ÷ 231A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH 
(ExtPict) ÷ [0.3] ÷ 231A ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 231A × 0308 ÷ 0020 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 231A × 00AD ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 231A × 0308 × 00AD ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 231A × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 231A × 0308 × 0300 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 231A × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 231A × 0308 × 200D ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 231A ÷ 0061 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 231A × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 231A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 231A × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 231A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 231A × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 231A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 231A × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 231A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 231A × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 231A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 231A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 231A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 231A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 231A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 231A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] WATCH (ExtPict) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 231A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 231A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] WATCH (ExtPict) × 
[4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0020 ÷ 0001 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] (Other) ÷ [0.3] ÷ 0020 × 0308 ÷ 0001 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0020 ÷ 000D ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [3.2] (CR) ÷ [0.3] ÷ 0020 × 0308 ÷ 000D ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0020 ÷ 000A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [3.2] (LF) ÷ [0.3] ÷ 0020 × 0308 ÷ 000A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0020 ÷ 000B ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0020 × 0308 ÷ 000B ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0020 ÷ 3031 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0020 × 0308 ÷ 3031 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0020 ÷ 0041 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0020 × 0308 ÷ 0041 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0020 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0020 × 0308 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0020 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0020 × 0308 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0020 ÷ 002E ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0020 × 0308 ÷ 002E ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0020 ÷ 0030 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0020 × 0308 ÷ 0030 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0020 ÷ 005F ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0020 × 0308 ÷ 005F ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0020 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0020 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0020 ÷ 05D0 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0020 × 0308 ÷ 05D0 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0020 ÷ 0022 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0020 × 0308 ÷ 0022 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0020 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0020 × 0308 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0020 ÷ 231A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0020 × 0308 ÷ 231A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ 
[0.3] ÷ 0020 × 0020 ÷ # ÷ [0.2] SPACE (WSegSpace) × [3.4] SPACE (WSegSpace) ÷ [0.3] ÷ 0020 × 0308 ÷ 0020 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0020 × 00AD ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0020 × 0308 × 00AD ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0020 × 0300 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0020 × 0308 × 0300 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0020 × 200D ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0020 × 0308 × 200D ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0020 ÷ 0061 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0020 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0020 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0020 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0020 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0020 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0020 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0020 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0020 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0020 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0020 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0020 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0020 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0020 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0020 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0020 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0020 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SPACE (WSegSpace) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0020 × 0308 ÷ 0031 ÷ 002E × 
2060 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 00AD ÷ 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 00AD × 0308 ÷ 0001 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 00AD ÷ 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 00AD × 0308 ÷ 000D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 00AD ÷ 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 00AD × 0308 ÷ 000A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 00AD ÷ 000B ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 00AD × 0308 ÷ 000B ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 00AD ÷ 3031 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 00AD × 0308 ÷ 3031 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 00AD ÷ 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 00AD × 0308 ÷ 0041 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 00AD ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 00AD × 0308 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 00AD ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 00AD × 0308 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 00AD ÷ 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 00AD × 0308 ÷ 002E ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 00AD ÷ 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 00AD × 0308 ÷ 0030 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 00AD ÷ 005F ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 00AD × 0308 ÷ 005F ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 00AD ÷ 1F1E6 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 00AD × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 00AD ÷ 05D0 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 00AD × 0308 ÷ 05D0 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 00AD ÷ 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 00AD × 0308 ÷ 0022 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 00AD ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 00AD × 0308 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE 
(Single_Quote) ÷ [0.3] ÷ 00AD ÷ 231A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 00AD × 0308 ÷ 231A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 00AD ÷ 0020 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 00AD × 0308 ÷ 0020 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 00AD × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 00AD × 0308 × 00AD ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 00AD × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 00AD × 0308 × 0300 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 00AD × 200D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 00AD × 0308 × 200D ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 00AD ÷ 0061 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 00AD × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 00AD ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 00AD × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 00AD ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 00AD × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 00AD ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 00AD × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 00AD ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 00AD × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 00AD ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 00AD × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 00AD ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 00AD × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 00AD ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] 
COMMA (MidNum) ÷ [0.3] ÷ 00AD × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 00AD ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 00AD × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] SOFT HYPHEN (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0300 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0300 × 0308 ÷ 0001 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0300 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0300 × 0308 ÷ 000D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0300 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0300 × 0308 ÷ 000A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0300 ÷ 000B ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0300 × 0308 ÷ 000B ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0300 ÷ 3031 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0300 × 0308 ÷ 3031 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0300 ÷ 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0300 × 0308 ÷ 0041 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0300 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0300 × 0308 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0300 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0300 × 0308 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0300 ÷ 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0300 × 0308 ÷ 002E ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0300 ÷ 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0300 × 0308 ÷ 0030 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0300 ÷ 005F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0300 × 0308 ÷ 005F ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0300 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0300 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0300 ÷ 05D0 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT 
(Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0300 × 0308 ÷ 05D0 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0300 ÷ 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0300 × 0308 ÷ 0022 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0300 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0300 × 0308 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0300 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0300 × 0308 ÷ 231A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0300 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0300 × 0308 ÷ 0020 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0300 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0300 × 0308 × 00AD ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0300 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0300 × 0308 × 0300 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0300 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0300 × 0308 × 200D ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0300 ÷ 0061 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0300 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0300 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0300 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0300 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0300 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0300 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0300 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0300 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] LATIN 
SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0300 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0300 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0300 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0300 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0300 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0300 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0300 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0300 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0300 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] COMBINING GRAVE ACCENT (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 200D ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 200D × 0308 ÷ 0001 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 200D ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 200D × 0308 ÷ 000D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 200D ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 200D × 0308 ÷ 000A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 200D ÷ 000B ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 200D × 0308 ÷ 000B ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 200D ÷ 3031 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 200D × 0308 ÷ 3031 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 200D ÷ 0041 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 200D × 0308 ÷ 0041 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 200D ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 200D × 0308 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 200D ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 200D × 0308 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 200D ÷ 002E ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 200D × 0308 ÷ 002E ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS 
(Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 200D ÷ 0030 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 200D × 0308 ÷ 0030 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 200D ÷ 005F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 200D × 0308 ÷ 005F ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 200D ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 200D × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 200D ÷ 05D0 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 200D × 0308 ÷ 05D0 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 200D ÷ 0022 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 200D × 0308 ÷ 0022 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 200D ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 200D × 0308 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 200D × 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] WATCH (ExtPict) ÷ [0.3] ÷ 200D × 0308 ÷ 231A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 200D ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 200D × 0308 ÷ 0020 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 200D × 00AD ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 200D × 0308 × 00AD ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 200D × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 200D × 0308 × 0300 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 200D × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 200D × 0308 × 200D ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 200D ÷ 0061 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 200D × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 200D ÷ 0061 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 200D × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 200D ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 200D × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] ZERO 
WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 200D ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 200D × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 200D ÷ 0061 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 200D × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 200D ÷ 0031 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 200D × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 200D ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 200D × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 200D ÷ 0031 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 200D × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 200D ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 200D × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 2060 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 × 2060 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 × 2060 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 × 2060 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 × 2060 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER 
(Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 × 2060 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 2060 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 2060 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 2060 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 × 2060 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 × 2060 × 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 × 2060 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 × 2060 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 × 2060 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 × 2060 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 2060 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL 
LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 × 2060 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 × 2060 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 × 2060 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 × 2060 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 × 2060 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 2060 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 2060 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 
× 2060 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 2060 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 2060 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 2060 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 2060 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [9.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 × 003A × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 003A × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 003A ÷ # ÷ 
[0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 × 003A × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 × 003A × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 003A × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING 
DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 003A × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 003A × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 × 003A × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 003A × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 003A × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 003A × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 003A × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 003A × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 003A × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 003A × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 003A × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 003A × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 
0061 ÷ 003A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 003A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 × 0027 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ 
[999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 × 0027 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 0027 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE 
(Single_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 × 0027 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 0027 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 0027 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 0027 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ 
[999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER 
(Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] SPACE 
(WSegSpace) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 × 
0027 × 2060 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 × 0027 × 2060 × 0308 × 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [6.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [7.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 0027 × 2060 × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0001 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 000D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 000A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 000B ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 
3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 3031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0041 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 002E ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0030 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 005F ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 05D0 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0022 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ 
[999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 231A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0020 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0061 ÷ 002C × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 × 200D ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0031 ÷ 003A ÷ # ÷ 
[0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0061 ÷ 002C × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON 
(MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 003A × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON 
(MidLetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 003A × 0308 ÷ 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT 
ONE (Numeric) ÷ [999.0] COLON (MidLetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 × 0027 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 × 0027 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] REGIONAL INDICATOR 
SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 0027 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0061 ÷ 0027 ÷ # ÷ 
[0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 0027 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 0027 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 0027 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 × 0027 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 × 0027 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 × 0027 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 × 0027 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 0027 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 0027 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 × 0027 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] APOSTROPHE (Single_Quote) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ 
[0.3] ÷ 0031 ÷ 002C ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 × 002C × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 × 002C × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] 
COMMA (MidNum) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 002C × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 002C × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 002C × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 × 002C × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] 
COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 × 002C × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 × 002C × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 × 002C × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 002C × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 002C × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 × 002C × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] COMMA (MidNum) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0001 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] (Other) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 000D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (CR) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 000A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (LF) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 000B ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [3.2] (Newline) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 3031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0041 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] 
COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 002E ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] FULL STOP (MidNumLet) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0308 × 0030 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 005F ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 1F1E6 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 05D0 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0022 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] QUOTATION MARK (Double_Quote) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 231A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP 
(MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] WATCH (ExtPict) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0020 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 × 00AD ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] SOFT HYPHEN (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 × 0300 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] COMBINING GRAVE ACCENT (Extend_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 × 200D ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE (Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 0027 × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] APOSTROPHE 
(Single_Quote) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 ÷ 002E × 2060 × 0308 ÷ 0061 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 003A ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 0027 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 002C ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 0031 × 002E × 2060 × 0308 × 0031 ÷ 002E × 2060 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [12.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [11.0] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) × [4.0] WORD JOINER (Format_FE) ÷ [0.3] ÷ 000D × 000A ÷ 0061 ÷ 000A ÷ 0308 ÷ # ÷ [0.2] (CR) × [3.0] (LF) ÷ [3.1] LATIN SMALL LETTER A (ALetter) ÷ [3.2] (LF) ÷ [3.1] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] ÷ 0061 × 0308 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) ÷ [0.3] ÷ 0020 × 200D ÷ 0646 ÷ # ÷ [0.2] SPACE (WSegSpace) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] ARABIC LETTER NOON (ALetter) ÷ [0.3] ÷ 0646 × 200D ÷ 0020 ÷ # ÷ [0.2] ARABIC LETTER NOON (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] SPACE (WSegSpace) ÷ [0.3] ÷ 0041 × 0041 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN CAPITAL LETTER A (ALetter) × [5.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0041 × 003A × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [6.0] COLON (MidLetter) × [7.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0041 ÷ 003A ÷ 003A ÷ 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 05D0 × 0027 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [7.1] APOSTROPHE (Single_Quote) ÷ [0.3] ÷ 05D0 × 0022 × 05D0 ÷ # ÷ [0.2] HEBREW LETTER ALEF (Hebrew_Letter) × [7.2] 
QUOTATION MARK (Double_Quote) × [7.3] HEBREW LETTER ALEF (Hebrew_Letter) ÷ [0.3] ÷ 0041 × 0030 × 0030 × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [9.0] DIGIT ZERO (Numeric) × [8.0] DIGIT ZERO (Numeric) × [10.0] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 0030 × 002C × 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) × [12.0] COMMA (MidNum) × [11.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 0030 ÷ 002C ÷ 002C ÷ 0030 ÷ # ÷ [0.2] DIGIT ZERO (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ZERO (Numeric) ÷ [0.3] ÷ 3031 × 3031 ÷ # ÷ [0.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.0] VERTICAL KANA REPEAT MARK (Katakana) ÷ [0.3] ÷ 0041 × 005F × 0030 × 005F × 3031 × 005F ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ZERO (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] VERTICAL KANA REPEAT MARK (Katakana) × [13.1] LOW LINE (ExtendNumLet) ÷ [0.3] ÷ 0041 × 005F × 005F × 0041 ÷ # ÷ [0.2] LATIN CAPITAL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN CAPITAL LETTER A (ALetter) ÷ [0.3] ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [15.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 1F1E7 × 200D ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 200D × 1F1E7 ÷ 1F1E8 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 0061 ÷ 1F1E6 × 1F1E7 ÷ 1F1E8 × 1F1E9 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER A (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER B (RI) ÷ [999.0] REGIONAL INDICATOR SYMBOL LETTER C (RI) × [16.0] REGIONAL INDICATOR SYMBOL LETTER D (RI) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 1F476 × 1F3FF ÷ 1F476 ÷ # ÷ [0.2] BABY (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [999.0] BABY (ExtPict) ÷ [0.3] ÷ 1F6D1 × 200D × 1F6D1 ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 0061 × 200D × 1F6D1 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 2701 × 200D × 2701 ÷ # ÷ [0.2] UPPER BLADE SCISSORS (Other) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] UPPER BLADE SCISSORS (Other) ÷ [0.3] ÷ 0061 × 200D × 2701 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] UPPER BLADE SCISSORS (Other) ÷ [0.3] ÷ 1F476 × 1F3FF × 0308 × 200D × 1F476 × 1F3FF ÷ # ÷ [0.2] BABY (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] BABY (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK 
TYPE-6 (Extend_FE) ÷ [0.3] ÷ 1F6D1 × 1F3FF ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [0.3] ÷ 200D × 1F6D1 × 1F3FF ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) × [4.0] EMOJI MODIFIER FITZPATRICK TYPE-6 (Extend_FE) ÷ [0.3] ÷ 200D × 1F6D1 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 200D × 1F6D1 ÷ # ÷ [0.2] ZERO WIDTH JOINER (ZWJ_FE) × [3.3] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 1F6D1 ÷ 1F6D1 ÷ # ÷ [0.2] OCTAGONAL SIGN (ExtPict) ÷ [999.0] OCTAGONAL SIGN (ExtPict) ÷ [0.3] ÷ 0061 × 0308 × 200D × 0308 × 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [4.0] COMBINING DIAERESIS (Extend_FE) × [4.0] ZERO WIDTH JOINER (ZWJ_FE) × [4.0] COMBINING DIAERESIS (Extend_FE) × [5.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 0061 ÷ 0020 × 0020 ÷ 0062 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] SPACE (WSegSpace) × [3.4] SPACE (WSegSpace) ÷ [999.0] LATIN SMALL LETTER B (ALetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] 
LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT 
ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE 
(Numeric) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0031 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0031 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] DIGIT ONE (Numeric) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 003A ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE 
(ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 003A ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 003A ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COLON (MidLetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002E ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 
0061 ÷ 002E ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002E ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002E ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A 
(ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002C ÷ 003A ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COLON (MidLetter) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002C ÷ 002E ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] FULL STOP (MidNumLet) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0031 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] DIGIT ONE (Numeric) ÷ [0.3] ÷ 0061 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0031 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] DIGIT ONE (Numeric) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] ÷ 0061 × 005F × 0061 ÷ 002C ÷ 002C ÷ 0061 ÷ # ÷ [0.2] LATIN SMALL LETTER A (ALetter) × [13.1] LOW LINE (ExtendNumLet) × [13.2] LATIN SMALL LETTER A (ALetter) ÷ [999.0] COMMA (MidNum) ÷ [999.0] COMMA (MidNum) ÷ [999.0] LATIN SMALL LETTER A (ALetter) ÷ [0.3] # # Lines: 1823 # # EOF 
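The test vectors above follow the standard Unicode break-test notation: each entry lists code points in hex, ÷ marks a position where a boundary must occur, × marks a position where one must not, and the bracketed numbers in the trailing comment cite the UAX #29 word-boundary rule that decides each position. A minimal sketch of how one such entry could be decoded for a test harness; the function name and the choice to report break positions as byte offsets are illustrative assumptions, not something defined by this crate:

// Hypothetical parser for one break-test entry such as
// "÷ 0031 × 005F × 0061 ÷ 003A ÷ 0031 ÷ # ...comment...".
// Returns the decoded characters and the byte offsets where breaks occur.
fn parse_break_test(line: &str) -> (Vec<char>, Vec<usize>) {
    let spec = line.split('#').next().unwrap(); // drop the trailing comment
    let (mut chars, mut breaks, mut offset) = (Vec::new(), Vec::new(), 0usize);
    for token in spec.split_whitespace() {
        match token {
            "÷" => breaks.push(offset), // a boundary is required here
            "×" => {}                   // a boundary is forbidden here
            hex => {
                let cp = u32::from_str_radix(hex, 16).expect("hex code point");
                let ch = char::from_u32(cp).expect("valid scalar value");
                offset += ch.len_utf8();
                chars.push(ch);
            }
        }
    }
    (chars, breaks)
}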
bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.bigendian.dfa
[binary file: serialized regex-automata sparse DFA (big-endian state encoding); contents not reproduced]

bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.littleendian.dfa
[binary file: serialized regex-automata sparse DFA (little-endian state encoding); contents not reproduced]

bstr-0.2.17/src/unicode/fsm/grapheme_break_fwd.rs
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
//    ucd-generate dfa --name GRAPHEME_BREAK_FWD --sparse --minimize --anchored --state-size 2 src/unicode/fsm/ [snip (arg too long)]
//
// ucd-generate 0.2.9 is available on crates.io.

#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref GRAPHEME_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("grapheme_break_fwd.bigendian.dfa"),
        };

        unsafe {
            ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes)
        }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref GRAPHEME_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("grapheme_break_fwd.littleendian.dfa"),
        };

        unsafe {
            ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes)
        }
    };
}
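For context on how these generated automata are consumed: GRAPHEME_BREAK_FWD is, by construction, an anchored forward automaton that matches one grapheme cluster at the start of its input. A minimal sketch under stated assumptions — the helper name and call site are hypothetical; it assumes only regex-automata 0.1's DFA trait and its find method, which the SparseDFA above implements. The crate's real call sites (not shown here) additionally handle invalid UTF-8.

use regex_automata::DFA; // trait providing `find` in regex-automata 0.1

// Hypothetical helper: byte length of the first grapheme cluster in `bs`, if
// the anchored forward DFA recognizes one starting at offset 0.
fn first_grapheme_len(dfa: &impl DFA, bs: &[u8]) -> Option<usize> {
    // For an anchored automaton a match always begins at the start of the
    // input, so `find` reports the end offset of the match, i.e. its length.
    dfa.find(bs)
}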
bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.bigendian.dfa
[binary file: serialized regex-automata sparse DFA (big-endian state encoding); contents not reproduced]

bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.littleendian.dfa
[binary file: serialized regex-automata sparse DFA (little-endian state encoding); contents not reproduced]
bstr-0.2.17/src/unicode/fsm/grapheme_break_rev.rs
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
//    ucd-generate dfa --name GRAPHEME_BREAK_REV --reverse --longest --sparse --minimize --anchored --state-size 2 src/unicode/fsm/ [snip (arg too long)]
//
// ucd-generate 0.2.9 is available on crates.io.

#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref GRAPHEME_BREAK_REV: ::regex_automata::SparseDFA<&'static [u8], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("grapheme_break_rev.bigendian.dfa"),
        };

        unsafe {
            ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes)
        }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref GRAPHEME_BREAK_REV: ::regex_automata::SparseDFA<&'static [u8], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("grapheme_break_rev.littleendian.dfa"),
        };

        unsafe {
            ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes)
        }
    };
}

bstr-0.2.17/src/unicode/fsm/mod.rs
pub mod grapheme_break_fwd;
pub mod grapheme_break_rev;
pub mod regional_indicator_rev;
pub mod sentence_break_fwd;
pub mod simple_word_fwd;
pub mod whitespace_anchored_fwd;
pub mod whitespace_anchored_rev;
pub mod word_break_fwd;

bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.bigendian.dfa
[binary file: serialized regex-automata dense DFA (big-endian state encoding); contents not reproduced]

bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.littleendian.dfa
[binary file: serialized regex-automata dense DFA (little-endian state encoding); contents not reproduced]

bstr-0.2.17/src/unicode/fsm/regional_indicator_rev.rs
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
//    ucd-generate dfa --name REGIONAL_INDICATOR_REV --reverse --classes --minimize --anchored --premultiply --state-size 1 src/unicode/fsm/ \p{gcb=Regional_Indicator}
//
// ucd-generate 0.2.9 is available on crates.io.

#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref REGIONAL_INDICATOR_REV: ::regex_automata::DenseDFA<&'static [u8], u8> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("regional_indicator_rev.bigendian.dfa"),
        };

        unsafe {
            ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes)
        }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref REGIONAL_INDICATOR_REV: ::regex_automata::DenseDFA<&'static [u8], u8> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("regional_indicator_rev.littleendian.dfa"),
        };

        unsafe {
            ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes)
        }
    };
}
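As with the forward automata, the reverse DFAs above are meant to be run backwards from a position. A minimal sketch with a hypothetical helper name, assuming only regex-automata 0.1's DFA trait and its rfind method: REGIONAL_INDICATOR_REV can report whether a buffer ends with a regional-indicator code point and how many bytes it occupies, which is the kind of question that comes up when deciding whether a pair of flag characters forms a single grapheme cluster.

use regex_automata::DFA; // trait providing `rfind` in regex-automata 0.1

// Hypothetical helper: byte length of the regional indicator that `bs` ends
// with, if any, found by running the reverse anchored DFA from the end of the
// slice backwards.
fn trailing_regional_indicator_len(dfa: &impl DFA, bs: &[u8]) -> Option<usize> {
    // `rfind` returns the start offset of the match ending at `bs.len()`,
    // so the matched suffix occupies `bs.len() - start` bytes.
    dfa.rfind(bs).map(|start| bs.len() - start)
}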
{ pub static ref REGIONAL_INDICATOR_REV: ::regex_automata::DenseDFA<&'static [u8], u8> = { #[repr(C)] struct Aligned { _align: [u8; 0], bytes: B, } static ALIGNED: &'static Aligned<[u8]> = &Aligned { _align: [], bytes: *include_bytes!("regional_indicator_rev.littleendian.dfa"), }; unsafe { ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) } }; } bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.bigendian.dfa000064400000000000000000004446170072674642500221420ustar 00000000000000rust-regex-automata-sparse-dfar  !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~5  !!"-../>??@@AZ[`az{BDL2L0h(d(dV(dj(d~@wFw>vw>vw>v`vvv  B& !!""'),-..::??[[]]{{}}BD L 4NV^x4(6G  !!""#&')*+,-..//0:;>??@@[[\\]]^`az{{||}}~C$rBrDC$r C$C$LC$C$ C$C$C$C$C$wwxyByzZ{EE{j{| |B}0}~V (Z>JK,KK8LL8  !!"-../>??@@AZ[`az{BDLL( h(d(d(d(lV(d((d~((()0)P))))**+,Hw>,bw>vw>- -Pv-v !!,-..::??BD L ;8l8t8|888888999$9>;<<:;@;r;& !!""'),-..::??[[]]{{}}BD ..L ....)n%%%%&&&6&D&d&~&&)*J*+>(f+L+)>& !!""'),-..::??[[]]{{}}BD L %%%%%&&&6&D&d&~&&&'d((X(f() )> !!,-..::??BD L 8X8l8t8|888888999$9>9:::;@;r;>  !!"+,-../9::;>??@@[`az{C$ B DC$ C$LC$C$ C$C$C$wwxyByzZ{EE{j{| |L}0}~(`ИЬ R&HJKKKԆLtL& !!""'),-..::??[[]]{{}} B D   d4<D^& !!""'),-..::??[[]]{{}} B D   .6.Ht !!,-..::?? B D   6JRZbvv¢°P|;  !!"-../>??@@[`az{C$ B DC$ .C$.C$.C$C$C$wwxyByzZ{EE,4{j{| |f}0}׸Bz .Z>JKKKL|LD  !!""#&')*-../>??@@[[\\]]^`az{{||}}~C$ B DC$ . C$ C$.C$.C$ C$ C$ C$ C$\wwxyByzZ{EEJR{j{| |Є}0}׸`2jN\؊ضxښVJKۈKK&ܔLLG  !!""#&')*+,-../9::;>??@@[[\\]]^`az{{||}}~C$rBrDC$r C$ C$LC$C$ C$ C$ C$ C$ C$wwxyByzZ{EE~~{j{| |.}0}~ BzP4JKKK"LL" !"')..??[[]]{{}}BD........l.6>R~ !!..??BD...   , L f n v  F      E  !!""#&')*+,-..//0:;>??@@AZ[[\\]]^`az{{||}}~C$rBrDC$r C$C$LC$C$ C$C$C$C$C$E(E(dEE(dEEEEF.Fl(dF(dFFFFG2GjGGGGHH6HJJJKK KKKL.LLLG  !!""#&')*+,-..//0:;>??@@[[\\]]^`az{{||}}~C$PBPDC$PC$C$C$C$C$C$C$C$C$wTwwxyByzZ{EE{0{8{j{| ||z}0}~~~:`JKKKvLdLG  !!""#&')*+,-../9::;>??@@[[\\]]^`az{{||}}~C$PBPDC$PC$C$C$C$C$C$C$C$C$wwxyByzZ{EELT{j{| |}0}~<b>LZNp,JK^KKjLLG  !!""#&')*+,-..//0:;>??@@[[\\]]^`az{{||}}~C$PBPDC$PC$C$C$C$C$C$C$C$C$wwxyByzZ{EEJR{j{| |Є}0}~:`ј.N\jҜ^Ԁ<JKnKK zLL>  !!"+,-../9::;>??@@[`az{C$PBPDC$PC$C$C$C$C$C$GvwwxyByzZ{EEGG{j{| |H}0}~HHIILI`IIIIJJKLjJKLKKM:MLN(LG  !!""#&')*+,-../9::;>??@@[[\\]]^`az{{||}}~C$rBrDC$r C$C$LC$C$ C$C$C$C$C$~wwxyByzZ{EE~~{j{| |.}0}~ Bz4JKKKLL>  !!"+,-../9::;>??@@[`az{C$rBrDC$r C$LC$C$ C$C$C$wwxyByzZ{EE{j{| |L}0}~(`ИЬ R6ӶJKKKԆLtL LLLLLLLLLLLLLLLLLLLLLLLLLLLL2L L2LLLLLLLLLLLLL2BLLLLLLLLLLLLLLLLLLLL BLL LLL >?X?l?z????@6?@?@@AABAV?AV4FAAAB*BJB^B~BBBC)<<&>>$>V>p> (&>(d>(d&&'J'p;h;$;$2$$$% %%.(d;%h8!%;4;T"h"v9 !"9F"(d""9r# 999::*:>:d:x::4#(d; 8R8x888"H89 ..w>/w>/w>0&w>0:0Dw>w>-w>-(d(d-7|77(d!x4!7!7!8 8822233(3N3h3334 424F4f44445 5&45X5l55!!"556 686d66 667767b2 2^2x(d 2222>111(d( DX(d.Nn112 1 1H1\1p(d)  LLLLLLLL LLLLLLLLLLLLLLL LL LLLL LLL LLLL BL L LLLLLLLLLL LLLL        3  "-/>@@[`az{C$C$C$C$C$C$C$C$h^wwxyByzZ{EEMh{j{| |h}0}׸iriiiiiiiijjkllJKlKKm<mLL4DfDX(d4(d( DX(d.NnC$BC$rC$C$C$C$C$C$C$C$C$C$C$C$C$C$C$ C$C$C$C$C$C$C$C$C$C$C$ C$C$ C$C$C$C$C$C$ C$C$C$C$ C$C$ C$C$C$ C$C$C$C$ C$C$ mnn,n:nHn\nno"orop pVppq@qqqr2rrrssnsssttPtdt.hhii8idiiiiije"jj(j<jJjpjjjVk_Fk"kBkbkjk~kkkkkl,l@lNlhll(dl(dllmRmxebeff:fNfgMgMg.g<MgJMgjMggMgg(dgh h2hFhfhhMde"e*eDeLeZ_MMtbb bb8bXbfb(dbbbccc(c`czccccdd.dZd(ddttMM_X_`$`2`:`B`J`R`Z`h`aaaaaM<NjOOOKOPHKPbKPQ Q|QQQQQQKKKLKKLEMEMMC$C$C$C$2]]]MR"M]M]]]]R*](d^^^4^N^VM^^^fM^t^V^^M^M^__*_2_F_N_\MV_d_rM_M____5XX(XNXnXXXXY2MYdYYYMZ>ZFZrZzMZZZrZ[[[M[HM[V[d[x[[[\\0\VVM\p\\M\\](M]ZM]tMXMX 
bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.bigendian.dfa [binary rust-regex-automata sparse DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.littleendian.dfa [binary rust-regex-automata sparse DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/sentence_break_fwd.rs
#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref SENTENCE_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("sentence_break_fwd.bigendian.dfa"),
        };

        unsafe { ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref SENTENCE_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("sentence_break_fwd.littleendian.dfa"),
        };

        unsafe { ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) }
    };
}
bstr-0.2.17/src/unicode/fsm/simple_word_fwd.bigendian.dfa [binary rust-regex-automata sparse DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/simple_word_fwd.littleendian.dfa [binary rust-regex-automata sparse DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/simple_word_fwd.rs
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
//   ucd-generate dfa --name SIMPLE_WORD_FWD --sparse --minimize --state-size 2 src/unicode/fsm/ \w
//
// ucd-generate 0.2.9 is available on crates.io.
#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref SIMPLE_WORD_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("simple_word_fwd.bigendian.dfa"),
        };

        unsafe { ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref SIMPLE_WORD_FWD: ::regex_automata::SparseDFA<&'static [u8], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("simple_word_fwd.littleendian.dfa"),
        };

        unsafe { ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) }
    };
}
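Since SIMPLE_WORD_FWD is generated from the single pattern \w (per the generator comment above), it can answer simple containment questions directly. The sketch below is an illustration only, not part of the crate; it assumes regex-automata 0.1's DFA trait with its is_match method, and the function name is made up for the example.

use regex_automata::DFA; // trait providing `is_match` in regex-automata 0.1

/// Illustrative helper: does `bytes` contain at least one `\w` word character?
fn contains_word_char(bytes: &[u8]) -> bool {
    SIMPLE_WORD_FWD.is_match(bytes)
}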
bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.bigendian.dfa [binary rust-regex-automata dense DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.littleendian.dfa [binary rust-regex-automata dense DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/whitespace_anchored_fwd.rs
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
//   ucd-generate dfa --name WHITESPACE_ANCHORED_FWD --anchored --classes --premultiply --minimize --state-size 1 src/unicode/fsm/ \s+
//
// ucd-generate 0.2.9 is available on crates.io.
#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref WHITESPACE_ANCHORED_FWD: ::regex_automata::DenseDFA<&'static [u8], u8> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("whitespace_anchored_fwd.bigendian.dfa"),
        };

        unsafe { ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref WHITESPACE_ANCHORED_FWD: ::regex_automata::DenseDFA<&'static [u8], u8> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u8; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("whitespace_anchored_fwd.littleendian.dfa"),
        };

        unsafe { ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) }
    };
}
bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.bigendian.dfa [binary rust-regex-automata dense DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.littleendian.dfa [binary rust-regex-automata dense DFA table; contents omitted]
bstr-0.2.17/src/unicode/fsm/whitespace_anchored_rev.rs
// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY:
//
//   ucd-generate dfa --name WHITESPACE_ANCHORED_REV --reverse --anchored --classes --premultiply --minimize --state-size 2 src/unicode/fsm/ \s+
//
// ucd-generate 0.2.9 is available on crates.io.
#[cfg(target_endian = "big")]
lazy_static::lazy_static! {
    pub static ref WHITESPACE_ANCHORED_REV: ::regex_automata::DenseDFA<&'static [u16], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u16; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("whitespace_anchored_rev.bigendian.dfa"),
        };

        unsafe { ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) }
    };
}

#[cfg(target_endian = "little")]
lazy_static::lazy_static! {
    pub static ref WHITESPACE_ANCHORED_REV: ::regex_automata::DenseDFA<&'static [u16], u16> = {
        #[repr(C)]
        struct Aligned<B: ?Sized> {
            _align: [u16; 0],
            bytes: B,
        }

        static ALIGNED: &'static Aligned<[u8]> = &Aligned {
            _align: [],
            bytes: *include_bytes!("whitespace_anchored_rev.littleendian.dfa"),
        };

        unsafe { ::regex_automata::DenseDFA::from_bytes(&ALIGNED.bytes) }
    };
}
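Both whitespace tables describe anchored runs of \s+: the forward DFA only matches a run that starts at the beginning of its input, and the reverse DFA only matches a run that ends at the end of its input, which is exactly the shape needed for trimming. The sketch below is an illustration only, not part of the crate; it assumes regex-automata 0.1's DFA trait, where find on the anchored forward DFA yields the end of a leading match and rfind on the anchored reverse DFA yields the start of a trailing match, and the helper name is hypothetical.

use regex_automata::DFA; // trait providing `find`/`rfind` in regex-automata 0.1

/// Hypothetical helper: strip leading and trailing whitespace from a byte slice.
fn trim_whitespace(bytes: &[u8]) -> &[u8] {
    // End offset of the whitespace run anchored at the start (0 if there is none).
    let start = WHITESPACE_ANCHORED_FWD.find(bytes).unwrap_or(0);
    // Start offset of the whitespace run anchored at the end (len if there is none).
    let end = WHITESPACE_ANCHORED_REV.rfind(bytes).unwrap_or(bytes.len());
    if start >= end {
        // Everything was whitespace (or the slice was empty).
        &[]
    } else {
        &bytes[start..end]
    }
}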
!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ ~:hpxL  ~:hpx~B~~L)'',,..09;;AZ__azfff f az3aa 3a3BPaabb63bhbbbbccdefe9f0ff(''..09::AZ__az00 0 F3./V=/ThpxJLl"",4<DLZnDR)""''..09::AZ__az0 0  3nv1 3–3B1ª30tÔâöĞ6H9$ǘ#AZaz 3 3 3BP8Xr3ޒ2r9<)""''..09::AZ__az0 0 N3nv1 3–3B1ª30tÔâöĞź(H9Zǘ__V 4HPXf"jV.__ 4HPXf"V4 &''(-..//09::;@AZ[^__``az{440b40b4 0b4 4 4 4X3r33334334L4`X3QZ< =99=?*^?F/ /09:@AZ[^__``az{44 4 4 4 4h3r33hhh43i<34LiViiij3jVjjjkk>lm"m=m99=no ?*o`?F7 &''(+,,--..//09::;;<@AZ[^__``az{44444 44 4 4 4>3r33|h4334LiV@x3 2F`f=X99=p?*?F/ /09:@AZ[^__``az{4484848484lPdx(Ttf=`h=|?*@?F/ /09:@AZ[^__``az{444p4T4p40nLx 2F`Z|=L= x?*?F4 &''(-..//09::;@AZ[^__``az{440b40b4 0b4 4 4 43(3r3333434234L4`4455.35f55566N7828=999=9:6?*:?F7 &''(+,,--..//09::;;<@AZ[^__``az{44444 44 4 4 4 3r33  h43 >34LiV X   3 >     2 =99=?*T?F/ /09:@AZ[^__``az{445454454[88.86TJ[8|88.\&8.88\@\l\\8.]]p]]]]^_``=`>J>R=aZa?*b?F/ /09:@AZ[^__``az{4454546454788.863L8J8|88.88.889(9T998.9:X:~:::;<=H==>J>R=>f>?*?2?F1x  12h2|D!!!&!.!6!D!X!l!z!!!!!"##.#<###px  bD2\.R.Z.h.v.......// //"//0/8/@/H/P/d/r////////0.--`--. --------@-`-h-p-x !.+:+T+\+d+x+++++++, ,,,:,T,b+,b',,,,,,,,---8**$*D*X*`*n*|******'j'****++ +,))** (*))(() ))2)@)T)\)d)r)))))((((($$L% %(%H%P%v%#$ !!!&(v(~((((((#&&&&&&&&&' ''('T'\'j'r'z''j''''''''(((($(2(L(Z(n&&N%&V&d&r&@%%&&&&,%%%%%%..................................................................................................................................................................... @1WjWxWWWWWWW3LW3LWX(XTXnXvX10~1 1zWt z 23L3T3b3p3333L33L333L33L333343L44"3L3L3.,^r |~?&hJ)'',,..09;;AZ__azfff f g>3gXg` 3g3BPgggg3h,hphhhhijk*kJ9kl\l(''..09::AZ__az00 0 138.11 313B12262\2v322333(3N45,5596d67 %09AZ__azpTp ,:N\v:Zh|d,Z 4454454545454555455454545 4545454545545545545545454554544 444454544445 55454555454545554545545554 5554544 4Th.(Z0Jp N$t0F}8.}P}}}x~8~L~f8.~t~~ 0hVsX?$Pv8.8."ztVz{"s?{H{V?{d{x8.j{{|t? J?y8.y?J??8.vv 8.v4,v8.v@w0h|xy8.8.8.8.?pttt8.tttu8.&Lf?@BBPBd=B~=B=CCD===?N==?t3L?3L??44444p^p8.p?p?L?p`qzqBqV8.q|qqq?8.qq?rrDr^rlr?r?ss*sXsfs?8.ss?6?s\D>t$5nRf?k ^??@ZzmH??mmJn4ph?o@?f?2?pJ?8.D?8.R8.`?8.*??8.??8.hh?vhi?8.?8.In?I|?8.II??GG ?GF?8.GZGnGGH(8.HTHhHHHH?II(IB?D?DD?E?8.E.8.EH?EnEF? ?D>DLDl?Dz?D?4554545544454 444445454545455454455454545454545454545454'454545454545454545454545454545454545454 54545454544444444545545454545454545545 54545454545454545455455454545545455454554545545454444444I44I45454545444__JJJJJJKK&K4KNKbKjKrKL<LLMMpMM<JPFOOOPPP"OOOOOOIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIITIIIIIII__U8JJJJJKK&K4KNKbKjKrKL<ULUMMpVMIWWJTWjWxWWWWWWW3LW3LWX(XTXnXvXTRTVTT,T:WTBTJTRNN6O OO2OFOlVB3L3T3b3pV333LV3L333L33L333343L44"3L3L3IIITIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII 445445454545454554554544 444454544445 55454555454545554545545554 5554544 ~$\J,dXB D@`0b0|||}8.}P}}}~~8~L~f8.~t~~~~&4ZhsX?4hBTh(8.r8."z z0ztzz{"s?{H{V?{d{x8.{{{{|<|t? yF?y~yy8.yy?yTJy?8.vv 8.v4vZvvv8.vvww"w6wzwwwxxxRxxy8.y8.?t>ttt8.tttuuu@8.ufuzuuubcBdBd=e=ep=ef4f==b2=bXTJ?TJbr?4444p^p8.p?p?p?ppqq"qBqV8.q|qqq?8.qq?rrDr^rlr?r?rss*s8sXsfs?8.sss?s?stD>t$5iiijj>j^jjj?k kdkk?lTlbll?lllmm.mHmV?mv?mmmnn4nNnnnh?noo>?odoo?p0?pJ?8.i??8.hh?hhhi?8.?8.i,iLilhB?hP?8.Ihd??gg?g?8.GZGnGGH(8.HTHhHHHH?ggh?f?gD?g:?8.gT8.gn?EnEF? 
?D>DLDl?f?D?4 44444545455454455454444444444444444445454445454544544545454545545454544545445445454544445454545 455454545 5445545454544 5454545454554544545454545454544454454445545454554545454454454545545444545445554554545454544554544545 5454545554445554545454545455444 54545444 545454545 5445454454454 5454545455454545444545544545545454554545454545545454554545454 5454545454554545445454545454554454545444545454545454545454545454544545454554545454545545454545454554455445545545444444545544545445454 45454545445445454555454455554454545 555544545 45454545554554545 455445454 5545444444 445454 45454545454554554454 454445444445455454444444 444454545444454545454545454545454545454545545455455455454545455454 5454545454 5454545445444 544444444545 545454545 54545454545 545454545454545455454554454554544554545455454545454 545454444454445455554554444544545444454545545445555545454555454545 445555445454554545454545454545454454545454545454 55554545445455455 55454554545454545454544544545455454444545454545454544444454545544454545454545454445454545445454545454545444454545454454545454545454545454 44454454 54545454544445454545454545454545444454545454 54545544545454545 445454545454444454444444544444454 544544445454454444454454545454454544545445454545454544445454545 455454545 544554545454454544545454545454544454454445545454554545454454454545544454545454545454455454 54545455544455545454545444 54545444 545454545 544545445444544545454454545445454545454454545454554545454446446454554454546454II54 4545454544545545454455554 5555445 45454545554554545 455445454 5545444444 44545454454 4544I4544I4II4I4I4I4I444646 64644454544545545454 5454545444 544444444545544545454 54545444445444545555455444544445454455454455555454 44555544 5555454545455 55454554545454545454544544545455454444545454545454544444454545544454545454545454445454545445454545454545444454545454454545454545454545454 44454454 54545454544445454545454545454545444454545454 54545544545454545 445454545454444454444444544444454 544544445454454444^ lt| (<h8^lP^Bt0 ^~6666*&4<DLZhv .6DR`&V4+&@fz8p~,:BV0J^r *8RZt4<DR >Lr @V^rzppppp666666p6ppp6p6p6p ppppppppppppppppppppppppppppppppp666666ppppppppppppppppppppppppppppppppppppppp666666688pppp66 44848484848484888488484848 484848484884884884884848488484848 48484848484848484884848 8484848484884884848484 <Jd~ڒڲ|Lܮ`Hޒ pB<\0&pՖP|ֶ֜ :`גנü&?:N\؂Nآ<0DdٺҠ 2Ӹ&??"<hԈ? ?"<Pj<?~Ғ?ϞϬ"<JЂЖа:fѰpτ?4BP^rΌβȬ,Nb=|====T=z??4848844*b?|?ʊ?ʞʲ4Tn˂?ː?˪6?P?d̴?&4B?\?͂͢<Ͱ52^rŞ?:Tư?8?^rnjüjDz??RlȆȬȺü?.?Tɘ?????Ĵ??Ħ??ü?>??L`zN?\?pÊ???&?:N4Ht”´?"??? ?4?N? 
?<Jj?x??48848488448484 8484848484848484848484848484848484848484848484'484848484848484848484848484848484848484 84848484844848848484484848848484848484848848 848484848484848484884884848488484884848848488484884884844848484844848484848484848484844848484848484884848848448484848484848484848484884844848484848484848484848484848484 84848484848848484848484848484884848484848484848484848484848488484848484484848448848 84848484848488484484884848484484884848484884848848484 84848484848 84848484884848484484484 84848484884848448484884848488484848848484848488484848848484 84848484848848484484848484848 8484848484848484848484848484848484848484848484848848484 8484848484848484848844884884848484844848488484848484848 48484848484848848484484848484848484848848484 848484848848848484848484848484484848484848484 484848484848848848484 4848484844848848484884848484484848 84848484848484484484848484848484848484848484848848488488484848484884848 8484848484 848484848484484 848484848484484848 848484848 84848484848 8484848484848484884848848484884844884848488484848484848484844884848448484884884848448448484484848848488484848484848484848484848848488484848484848484848448484848484848484848448488488484884848484848484884848484848484848448484848484848484848848484848484844848484848484848 8484848484848484848484848484884848484848484844848484848484848484848 848484848484 84848484884848484848484 8484848484848848484848484848484848 8484848484848 484848484848484848484848448484484844848448484 848484848484884848848484484ppp6p6ppppp6p666ppp66pp6ppp6p6666p666p6p6p6pp6p6p66p66pppp66ppppppp6 6pppppp6p6 666p6p666p66p6pp66ppp666p66pp6p6p6ppp ppppp666p6p6ppp66pp6p6p6p66p6pp6ppp66ppp66ppp66666p6pppp666pp6pppppppppp6ppp6pppp6pppppppppppppppppppppppppppppp p666ppp66pppp6ppppppp6p6ppppp6ppppp6ppppppppppppppp66TTppp6pTp886ppppp6pppppppppp66pp6pp6ppp p6p6p6p66p6p666ppp6p6pp6p6p66p6p6p6p6666666p6p6ppppppppp66p68p68888pppp8886f66TTTT66ppp6 pppppppppppppppppppppp6ppppppp6ppppppppp6p%09AZ__azpTpF ,:N\v:Zh|dl4,xp6pp!BJ4WWWWWW3LW3LWX(XTXnXvX >^ W ^~3LZnV333L3L333L33L333343L44"3L3L3ppppp6p68p6f66TTTT66pp pppppppppppppp666666666ppppppppppppppppppppppppp6ppppppppppppp6p6p6pp66666pp6p6ppp66p6666666p66p66p6ppp6p6ppppp 66p6p6p6p66ppppppppppppppppppppp6p6p6p6p6pppp66p6p66p6p6p p6pppppp6p6 666p6pp6pp6 6ppppppp66 6666pp6p6 6pppppp6p6 666pp6p6 6pppppp6p6 6666pp6p 6pppppppppp6666p66ppppp66666pp6 p6ppppp6p6 6666pp6p6ppp6p6 666pp6p6p6ppppp66666666666p66666pp666p666 44p4T4p4p4p4p4pp4pTTp4p4p4p 4p4p4p4p4pp4pp4Tpp4pp4p4p4pp4p4T4T 4T4T4T4p4p4T4T4T4p pT44pTpp4pT4TpT4Tpp4p4TpTpTT4ppTp4 pTp4p4T4 'j''''((H()6)***+P+,D,,-.--.0../,/L/l///00N0!f!z!!"."r"""##*#D#R#f###$$$8 F$^@$$?$$$%%2%@%T%%%%%&&^&r&&'Rfz v? & 4? B V j   !!R? b?<<?~Ғ?.<Pv0>R"6n"6?&|(Nb0j= =l=0===?"?4T4TT44F~??? *>d??,FTt?? @Nh?v?? 5&F?Ll?<J|?|0>?^?r6n F?&?L??2?v??\??N?? F T? n   ??  N?\?  ??  ? ?   V j     @ ` ?   ?(?f??? h T? 
????4pp4p4pp44T44 T4T4T4T4T4p4Tp44p4pT4p4T4pTp44p4p4p4p4p4p4p4p4p4'4p4p4p4p4p4p4p4p4p4p4p4p4p4p4p4p4p4p4p4 p4p4p4p4p44T4TT4T4T44T4p4pp4p4p4p4p4p4p4pp4p p4p4p4p4p4p4p4p4p4pp4pp4p4p4pp4p4pp4p4pp4p4pp4p4T4TT4T44T4T4T4p4p4p4p4T4T4p4p444p4T4pT4p44p4pp4p4TpTT4Tp44p4pT4T4p4p4T4p4p4p4TpTT44TTpT4T44T4p44TpT4 4pTp4pT4p4TpT Tp4T4Tp4p4p4pT4T4 p4p4p4p4p4ppT44T4p4p4p4p4p4p4TpTT4T4T4p4T4pT4T4T4pTTp44Tp4pTTp4p44pT4TT4pT4T4p44pTp444T44pT44pp4pp4p4p4p4pT4T4TpTpT444p4p pT4Tp4pT4pTpTpT44T4pTppT4p4p4pT4Tp44pp4T4T4 p4p4pT4T4T4T TpT44p4p4p pT4T4Tp444pT44p4 p4p4p4p4pp4p4p4p44T4p4pp4T4p4pp4pT4p4pp4p4p4p44pp4p4p4pp4p4p4p4 p4p4p4p4p4pp4p4p44p4p4p4p4p4ppT4T4Tp4p4p4T4T4p4p4p4pT4p4p4p4p4p4p4pT44p4T4p4p4p4ppT4p4p4p4p4pTp4p4p4p4p4p4pp44pp44pp4pp4p4T4T4T44T4p4pp4T44p4T4p488Tp4 4p4p4p4p4T4p4pp4p4p44p4p4pp4pT4T4ppTppT44p4p4p pTpTpTpT4T4p4TpT T44Tp4p4pTpT4ppT4p4TpT T4p4T44pT4 pTpT44T44T4T4T4T 4T4pT4pT4 4p4p4p4p4p4pp4ppT4T44 4p4T484p44T8484p4pp4p4T4T4T T4T4T4T4p4p4p44T44p4p4p4p4p4p4p4p4p4p4p4p4p4p4pp4p4pp4pp4pTp4p4p4p4pp4p4T p4p4p4p4p4 p4p4p4p4T4p44T4 4T4T4T4T4T44T4TT4p p4p4p4p4p p4p4p4p4p4p p4p4p4p4p4p4p4p4pp4p4pp4T4p4pp4p44pp4p4p4pp4p4p4pT4pT4 pT4p4p4T44TT4T444T44ppTpTp4pp4T4T44444pT44T4T4T44TT4TpTTp44T4TpTpppT4pT44pp4p4p4p 4T4TpTpTpTp4T4Tp4p4pp4p4p4p4p4p4p4p4p4p44p4p4p4p4p4p4p4 pTpTpTpT4pT4p44p4pp4TTpTpT TpTpT44ppT4p4p4p4p4p4p4TpTT4T4Tp4T4p4pT4p4p4T44T4p4p4p4p4p4p4p4T4TT4T4T4T4p4p4TpT44T4p4p4p4p4p4p4TpTT4T4T4p4pT44pT4T4p4p4p4p4p4p4TpTT4T4T4T4p4pT44p44Tp4p4p4p4p4p4p4p4p4p4T T4T4T4p4T44 Tp4p4p4p4pTT4T4T4T4p4pT44pT4p4p4p4p4p4TpTT4T4T4T4p4pT44p4 Tp4p4pTpTT4T4Tp4pT4pT44p 4T4p4p4p4p4p4T4T4T4T44T44T4T44T444T4T44T44 p4T44T4T4T4Tp4p4TT4TpT4T44T4#AZaz 3 3.3BPj3<J^~@P9B D^rYv? Vb?V<V3V<?~Ғ?3S.S<3SPSvSSS3SST0T>TRTTTTU"U6UnUUV"3V6333SS?PQ:QZQh3QvQQQQQ3R0RDRRR:<(=h=====>*=>\>?P==:=:?:?4p4pp44MM<3MV?Md?Mr?MMMMMM3N"NBN\Np?3N~N?NNOOO2?OL?O`OOOOP P&?3P4PBPP?Pp?PP?P5F^FrFFFGGBGVG?GH H*H?HII:IH?ItII:IIII?J?J0J>JXJJJK,KXK~E?KKK?L LTL?L?L?3F4?3FB3FP?3F??3F ??3EE?E,ELE`E?3?3EEEN?\?3DD??BPBj?B?3BBCC(Cr3CCCCDD>?D^DrD???@$@D?@^?3@x3@?@A&B? 
?????????4  4 4  44p4 4 p4p4p4p4p4 4p 4 4 4 p 4 4p4 p 4 4 4 4 4 4 4 4 4 4 4'4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44p4pp4p4p44p4 4  4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4  4  4 4 4  4 4  4 4  4 4  4 4 p4pp4p44p4p4p4 4 4 4 4p4p4 4 4 44 4p4 p4 4 4 4  4 4p pp4p 44 4 p4p4 4 4p4 4 4 4p pp4 4pp p4p44p4 4 4p p4 4 p 4 p4 4p p p 4p4p 4 4 4 p4p4 4 4 4 4 4  p4 4p4 4 4 4 4 4 4p pp4p4p4 4p4 p4p4p4 pp 4 4p 4 pp 4 4 4 p4pp4 p4p4 4 4 p 4 44p4 4 p44  4  4 4 4 4 p4p4p p p4 44 4 p4p 4 p4 p p p44p4 p  p4 4 4 p4p 4 4  4p4p4 4 4 p4p4p4p p p4 4 4 4 p4p4p 4 44 p44 4 4 4 4 4  4 4 4 44p4 4  4p4 4  4 p4 4  4 4 4 4 4  4 4 4  4 4 4 4 4 4 4 4 4  4 4 44 4 4 4 4 4  p4p4p 4 4 4p4p4 4 4 4 p4 4 4 4 4 4 4 p4 4 4p4 4 4 4  p4 4 4 4 4 p 4 4 4 4 4 4  44  44  4  4 4p40b4p4 4 4 40b40b4 4  4p 40b40b4 0b4 4 4 488p 4 4 4 4 4 4p4 4  4 4 44 4 4 4 p4p4  p  p44 4 4 p p p p4p4 4p p p4 4p 4 4 p p4  p4 4p p p4 4p4 4 p4 p p4 4p44p4p4p4p 4p4 p4 p4 4 4 4 4 4 4  4  p4p4 4 4 4p484 44p8484 4  4 4p40b40b40bp 4 4 4p4p4 4 4 44p44 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4  4  4 p 4 4 4 4  4 4p 4 4 4 4 4 4 4 4 4p4 44p4 4p4p4p4p4p44p4p p4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4  4p4 4  4 44  4 4 4  4 4 4 p4 p4 p4 4 4p44pp4p4 44p4 4  p p 4  4p4p44 44 4 p44p4p4p 4 4pp4p pp 4 4p4p p  p4 p4 4 4 4 4 4p4p p p p 4p4p 4 4  4 4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 p p p p4 p4 44 4  4pp p p p p p4 4  p4 4 4 4 4 4 4p pp4p4p 4p4 4 p4 4 4p44p4 4 4 4 4 4 4 4p4pp4p4p4p4 4 4 p p44p4 4 4 4 4 4 4p pp4p4p4 4 p4 4 p4p4 4 4 4 4 4 4p pp4p4p4p4 4 p4 4 44p 4 4 4 4 4 4 4 4 4 4p p4p4p4 4p4 4 p 4 4 4 4 pp4p4p4p4 4 p4 4 p4 4 4 4 4 4p pp4p4p4p4 4 p4 4 4 p 4 4 p pp4p4p 4 p4 p4 4 4p4 4 4 4 4 4p4p4p4p4 4p44p4p44p4 44p4p44p4 4 4p4 4p4p4p4p 4 4pp4p p4p44p4 44 4 4 4 4  4 4 4 4 4 4 4 4  4  4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4  4  4 4 4 4 _P> XL684Tt$V0xZ3ZRZZ[[:[N[h3[v[["TEO\?\8Lr]^.3x3^^_ _2|WvWX$P4?XJXX?XfXz3X Y@Yv? $?\V3V?J?3S.S<3SP~~S3ST0^r BVUV"33?}xQ:QZQh3QvQQQ}}3~~.~T~n~op=hr<==rP=r=rsvs==ot=oh?ho?4 4  44MM<3MV?Md?|T?M|hM|MM3N"NBN\Np?3N~N?NNOOO2?OL?|OO|OP P&?3P4PB}?}>?P}d?P5vvvvvvwwZwnw?Gwwxf?xxyy?yHybyyyIy?y?J0J>yzRJzxzz{E?{"K{H?{n{{?|:?L?3v\??3EE?uuE`E?3?3uv v*In?I|?3Du??tt?u?3BBCC(Cr3CCCCDD>?u*u>uX?t?t\@D?t|?3t3t?@A&B? ?????t???4 4 4 4 4 4 4 4 4 4 4 4 44 4  4 4 44 4 4  4 44 4 4 4 4 4 4 44 4 4 4 4  4 44 4 4 4 4 4  4 4  4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4  4 4 4  4 4 4 4  4 4 4 4 4 44 4 4 4 4 4 4 4 4 4  4 4 4 4 4 44 4  4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 44 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 4  4 4 4 4 4 4II 4 4 4 4 4 4 4 4 4   4 4 4 4 4 4 4  4  4 4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4I4 44 I4I4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 4  4 4 4 4 4 4 4 4 44  4 4 44 4 4  4  4 4 4 44 4 4  4 4  4  4 4 4  4 4 4 4 4 4 4 4 4 4  4 4  4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 44 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 44 4 44 4 44 4 4 4 4 4 4 4 4  4 4  4 4 44 4 % %R%l%%%%&H&&'&' ''(r(())X))**>##*X*r*,$$$ 44 4 4 40b4 4 4 4 4  4  4 4 4 4 4 4 4 4 0b4 4 4 4  4 4 4 4 4 4 4  4  4 4 4  _$38X(`tD|"8^~0Z3ZRZZ[[:[N[h3[v[[2XEO\?\,Rr3T0]^33^^_ _2tWvWX$P4?XJXX?XfXz3X0YPYv? ?TV3V?J?3S.S<3SP3QZS3ST03 3:N~rUV"33?Q:QZQh3QvQQQQ3(RD~T<P=h:==N==t==r=3?3?4 4  44MM<3MV?Md??MMMM3N"NBN\Np?3N~N?NNOOO2?OL?OODOP P&?3P4PB^?x?P?P530Pj3?G:T?338?^r3EI?MV?J0J>"J<bpE?K?.f??L?3??3EE?N~E`E?3?3In?I|?3Dj????3BBCC(Cr3CCCCDD>?$>??Z@D?t?33?@A&B? 
????????4 4 4 4 4 4 4 4 4 4 4 4 44 4  4 4 44 4 4  4 44 4 4 4 4 4 4 44 4 4 4  4 44 4 4 4 4 4 4 4  4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 44 4 4 4 4 4 4 4 4  4 4 4 4 4 44 4  4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 44 44 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 40b4 4 4 4 I 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4  4  4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4I4 44 I4I 4 40b40b40b 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 4  4 4 4 4 4 4 4 4 44  4 4 44 4 4  4  4 4 4 44 4 4  4 4  4 4 4 4  44 4 4 4  4 4 4 4  4 4  4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 44 4 44 4 44 4 4 4 4 4 4 4 4  4 4  4 4 44 409 <DLT\j~֒֬X~،F 44 4 4 4 4  4 4   4   4 4 4 4 4 4 4 4 44 4 4 4 4  4 4 4 4 4  4  4 44 4 4 J͂_͢Ͷ|vFТxNҒPӸRԖ ,Llռ0BVʠZ3ZRZZ[[:[N[h3[v[[JX~EˤO\?\ 0P^r̞]^363^^_ _2`WvɰWX$P4?XJXX?XfXz3XY Yv? ?V3V?J?3S.S<3SPlƒƦS3SƺT0VvǖǪ.`UV"3Ș3?Q:QZQh3QvQQQ(`3ŤŸ~T((b=h====J>===|?|?4 4  44MM<3MV?Md?î?MMMM3N"NBN\Np?3N~N?NNOOO2?OL?OOXOP P&?3P4PBx?Ę?Pľ?P5 0\| ?G8X?(6hv?hI?<?J0J>PJ 6\E?|K¢?V?Ô?L?3??3EE?6E`E?3?3JjIn?I|?3D??6P?v?3BBCC(Cr3CCCCDD>???@D??33?@A&B? ?????v???4 4 4 4 4 4 4 4 4 4 4 4 44 4  4 4 44 4 4  4 44 4 4 4 4 4 4 44 4 4 4 4  4 44 4 4 4 4 4  4 4  4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4  4 4 4  4 4 4 4  4 4 4 4 4 44 4 4 4 4 4 4 4 4 4  4 4 4 4 4 44 4  4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 44 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 4 4 444 4  4 4444 44 4 I 4 4 4 4 4 4 4 4 4   4 4 4 4 4 4 4  4  4 4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4I4 44 I4I 4 444 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 4  4 4 4 4 4 4 4 4 44  4 4 44 4 4  4  4 4 4 44 4 4  4 4  4  4 4 4  4 4 4 4 4 4 4 4 4 4  4 4  4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 44 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 44 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4  4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 44 4 44 4 44 4 44 4 4 4 4 4 4 4 4  4 4  4 4 44 4$$   @Zbj~*P^~"0>dl*8@Tbp~2 :HPX&FTn|߄ߒ߬ߴvٮڔڜ"NhDDL ބތޔޜ޼&N\p܄ܒܦܴ 8@T\jxT݆ݔhݜݪݲ&4Nb|F `*8۔ۢ۶۾@`hp~ۆ08                          `09 DLT\j~֒֬،FHWjWxWWWWWWW3LW3LWX(XTXnXvXPXWvٮڔڜ3L3T3b3pV333Lv3L333L33L333343L44"3L83L3`                     $ 0b   0b0b    0b    0b0b0b0b0b 0b0b0b  0b 0b 0b0b 0b0b 0b 0b 0b0b  0b  0b 0b :l&jP *V  V v        "-.3Nt3>^ft6DR`t33D,X`hv3 3333 BJ^<b3p33x33,:BJX zxvJdr0b0b0b*f33 (63>FTz$3,4<J^r*$Jd6DXl6(Nt 0P^3t3|33f3^ 3330D3LZn3v3,@Thv 4HP3^3l,    0b0b0b0b0b0b 0b   0b 0b 0b   0b0b0b0b0b0b           0b0b0b0b0b0b0b  0b0b   0b 0b   0b 0b0b0b  0b0b  0b   0b 0b0b0b0b 0b0b0b 0b 0b 0b 0b 0b 0b0b 0b0b  0b0b  0b 0b 0b 0b 0b0b0b 0b 0b0b0b 0b0b 0b  0b0b  0b0b0b 0b0b  0b 0b 0b  0b0b0b 0b 0b  0b0b 0b 0b 0b 0b0b 0b  0b  0b0b  0b0b 0b0b0b0b0b 0b  0b0b0b  0b     0b  0b  0b             0b0b0b 0b0b   0b      0b 0b   0b  0b        0b0b  0b  0b  0b      0b0b  0b  0b  0b 0b 0b 0b0b 0b 0b0b0b 0b 0b  0b 0b 0b0b 0b 0b 0b 0b0b0b0b0b0b0b 0b 0b   0b0b 0b 0b  0b0b0b0b0b  0b       0b   0b  0b #AZaz 3 3.3BPj3<J^~@9B 0b !NnDWWWX`WWW3LW3LWX(XTXnXvXhv3 .3W zxv:3LV333L 3L333L33L333343L44"3L3L3    0b 0b 0b0b0b0b0b   0b0b0b0b0b0b0b0b0b       0b       0b 0b 0b 0b0b0b0b0b  0b 0b  0b0b 0b0b0b0b0b0b0b 0b0b 0b0b 0b  0b 0b  0b0b 0b 0b 0b 0b0b     0b 0b 0b 0b 0b   0b0b 0b 0b0b 0b 0b 0b 0b 0b 0b0b0b 0b 0b 0b 0b 0b0b 0b0b0b0b 0b 0b 0b 
[binary data omitted: this stretch of the archive contains the serialized DFA tables under bstr-0.2.17/src/unicode/fsm/ (the *.bigendian.dfa and *.littleendian.dfa files that the fsm wrapper modules such as grapheme_break_fwd, grapheme_break_rev, regional_indicator_rev, sentence_break_fwd, simple_word_fwd, whitespace_anchored_fwd, whitespace_anchored_rev and word_break_fwd load via include_bytes!). The raw bytes are not valid UTF-8 and cannot be reproduced as text.]
",j2161ڤbJ04!h*2FTbpxRxx4D<*4<x4<bstr-0.2.17/src/unicode/fsm/word_break_fwd.rs000064400000000000000000000022650072674642500172510ustar 00000000000000// DO NOT EDIT THIS FILE. IT WAS AUTOMATICALLY GENERATED BY: // // ucd-generate dfa --name WORD_BREAK_FWD --sparse --minimize --anchored --state-size 4 src/unicode/fsm/ [snip (arg too long)] // // ucd-generate 0.2.9 is available on crates.io. #[cfg(target_endian = "big")] lazy_static::lazy_static! { pub static ref WORD_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = { #[repr(C)] struct Aligned { _align: [u8; 0], bytes: B, } static ALIGNED: &'static Aligned<[u8]> = &Aligned { _align: [], bytes: *include_bytes!("word_break_fwd.bigendian.dfa"), }; unsafe { ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) } }; } #[cfg(target_endian = "little")] lazy_static::lazy_static! { pub static ref WORD_BREAK_FWD: ::regex_automata::SparseDFA<&'static [u8], u32> = { #[repr(C)] struct Aligned { _align: [u8; 0], bytes: B, } static ALIGNED: &'static Aligned<[u8]> = &Aligned { _align: [], bytes: *include_bytes!("word_break_fwd.littleendian.dfa"), }; unsafe { ::regex_automata::SparseDFA::from_bytes(&ALIGNED.bytes) } }; } bstr-0.2.17/src/unicode/grapheme.rs000064400000000000000000000301540072674642500152730ustar 00000000000000use regex_automata::DFA; use crate::ext_slice::ByteSlice; use crate::unicode::fsm::grapheme_break_fwd::GRAPHEME_BREAK_FWD; use crate::unicode::fsm::grapheme_break_rev::GRAPHEME_BREAK_REV; use crate::unicode::fsm::regional_indicator_rev::REGIONAL_INDICATOR_REV; use crate::utf8; /// An iterator over grapheme clusters in a byte string. /// /// This iterator is typically constructed by /// [`ByteSlice::graphemes`](trait.ByteSlice.html#method.graphemes). /// /// Unicode defines a grapheme cluster as an *approximation* to a single user /// visible character. A grapheme cluster, or just "grapheme," is made up of /// one or more codepoints. For end user oriented tasks, one should generally /// prefer using graphemes instead of [`Chars`](struct.Chars.html), which /// always yields one codepoint at a time. /// /// Since graphemes are made up of one or more codepoints, this iterator yields /// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints /// are [substituted](index.html#handling-of-invalid-utf-8). /// /// This iterator can be used in reverse. When reversed, exactly the same /// set of grapheme clusters are yielded, but in reverse order. /// /// This iterator only yields *extended* grapheme clusters, in accordance with /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Grapheme_Cluster_Boundaries). #[derive(Clone, Debug)] pub struct Graphemes<'a> { bs: &'a [u8], } impl<'a> Graphemes<'a> { pub(crate) fn new(bs: &'a [u8]) -> Graphemes<'a> { Graphemes { bs } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. 
/// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"abc".graphemes(); /// /// assert_eq!(b"abc", it.as_bytes()); /// it.next(); /// assert_eq!(b"bc", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for Graphemes<'a> { type Item = &'a str; #[inline] fn next(&mut self) -> Option<&'a str> { let (grapheme, size) = decode_grapheme(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; Some(grapheme) } } impl<'a> DoubleEndedIterator for Graphemes<'a> { #[inline] fn next_back(&mut self) -> Option<&'a str> { let (grapheme, size) = decode_last_grapheme(self.bs); if size == 0 { return None; } self.bs = &self.bs[..self.bs.len() - size]; Some(grapheme) } } /// An iterator over grapheme clusters in a byte string and their byte index /// positions. /// /// This iterator is typically constructed by /// [`ByteSlice::grapheme_indices`](trait.ByteSlice.html#method.grapheme_indices). /// /// Unicode defines a grapheme cluster as an *approximation* to a single user /// visible character. A grapheme cluster, or just "grapheme," is made up of /// one or more codepoints. For end user oriented tasks, one should generally /// prefer using graphemes instead of [`Chars`](struct.Chars.html), which /// always yields one codepoint at a time. /// /// Since graphemes are made up of one or more codepoints, this iterator /// yields `&str` elements (along with their start and end byte offsets). /// When invalid UTF-8 is encountered, replacement codepoints are /// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the /// indices yielded by this iterator may not correspond to the length of the /// grapheme cluster yielded with those indices. For example, when this /// iterator encounters `\xFF` in the byte string, then it will yield a pair /// of indices ranging over a single byte, but will provide an `&str` /// equivalent to `"\u{FFFD}"`, which is three bytes in length. However, when /// given only valid UTF-8, then all indices are in exact correspondence with /// their paired grapheme cluster. /// /// This iterator can be used in reverse. When reversed, exactly the same /// set of grapheme clusters are yielded, but in reverse order. /// /// This iterator only yields *extended* grapheme clusters, in accordance with /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Grapheme_Cluster_Boundaries). #[derive(Clone, Debug)] pub struct GraphemeIndices<'a> { bs: &'a [u8], forward_index: usize, reverse_index: usize, } impl<'a> GraphemeIndices<'a> { pub(crate) fn new(bs: &'a [u8]) -> GraphemeIndices<'a> { GraphemeIndices { bs: bs, forward_index: 0, reverse_index: bs.len() } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. 
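//
// Illustrative sketch, not part of the upstream sources: as the
// `GraphemeIndices` docs above describe, an invalid byte is reported with a
// one-byte index range even though the yielded `&str` is the three-byte
// "\u{FFFD}" replacement codepoint:
//
//     use bstr::ByteSlice;
//
//     let gis: Vec<(usize, usize, &str)> =
//         b"a\xFFz".grapheme_indices().collect();
//     assert_eq!(vec![(0, 1, "a"), (1, 2, "\u{FFFD}"), (2, 3, "z")], gis);
//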
/// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"abc".grapheme_indices(); /// /// assert_eq!(b"abc", it.as_bytes()); /// it.next(); /// assert_eq!(b"bc", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for GraphemeIndices<'a> { type Item = (usize, usize, &'a str); #[inline] fn next(&mut self) -> Option<(usize, usize, &'a str)> { let index = self.forward_index; let (grapheme, size) = decode_grapheme(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; self.forward_index += size; Some((index, index + size, grapheme)) } } impl<'a> DoubleEndedIterator for GraphemeIndices<'a> { #[inline] fn next_back(&mut self) -> Option<(usize, usize, &'a str)> { let (grapheme, size) = decode_last_grapheme(self.bs); if size == 0 { return None; } self.bs = &self.bs[..self.bs.len() - size]; self.reverse_index -= size; Some((self.reverse_index, self.reverse_index + size, grapheme)) } } /// Decode a grapheme from the given byte string. /// /// This returns the resulting grapheme (which may be a Unicode replacement /// codepoint if invalid UTF-8 was found), along with the number of bytes /// decoded in the byte string. The number of bytes decoded may not be the /// same as the length of grapheme in the case where invalid UTF-8 is found. pub fn decode_grapheme(bs: &[u8]) -> (&str, usize) { if bs.is_empty() { ("", 0) } else if let Some(end) = GRAPHEME_BREAK_FWD.find(bs) { // Safe because a match can only occur for valid UTF-8. let grapheme = unsafe { bs[..end].to_str_unchecked() }; (grapheme, grapheme.len()) } else { const INVALID: &'static str = "\u{FFFD}"; // No match on non-empty bytes implies we found invalid UTF-8. let (_, size) = utf8::decode_lossy(bs); (INVALID, size) } } fn decode_last_grapheme(bs: &[u8]) -> (&str, usize) { if bs.is_empty() { ("", 0) } else if let Some(mut start) = GRAPHEME_BREAK_REV.rfind(bs) { start = adjust_rev_for_regional_indicator(bs, start); // Safe because a match can only occur for valid UTF-8. let grapheme = unsafe { bs[start..].to_str_unchecked() }; (grapheme, grapheme.len()) } else { const INVALID: &'static str = "\u{FFFD}"; // No match on non-empty bytes implies we found invalid UTF-8. let (_, size) = utf8::decode_last_lossy(bs); (INVALID, size) } } /// Return the correct offset for the next grapheme decoded at the end of the /// given byte string, where `i` is the initial guess. In particular, /// `&bs[i..]` represents the candidate grapheme. /// /// `i` is returned by this function in all cases except when `&bs[i..]` is /// a pair of regional indicator codepoints. In that case, if an odd number of /// additional regional indicator codepoints precedes `i`, then `i` is /// adjusted such that it points to only a single regional indicator. /// /// This "fixing" is necessary to handle the requirement that a break cannot /// occur between regional indicators where it would cause an odd number of /// regional indicators to exist before the break from the *start* of the /// string. A reverse regex cannot detect this case easily without look-around. fn adjust_rev_for_regional_indicator(mut bs: &[u8], i: usize) -> usize { // All regional indicators use a 4 byte encoding, and we only care about // the case where we found a pair of regional indicators. if bs.len() - i != 8 { return i; } // Count all contiguous occurrences of regional indicators. If there's an // even number of them, then we can accept the pair we found. 
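//
// A worked example of the even/odd rule discussed in this comment
// (illustrative, not an upstream comment): for three contiguous regional
// indicators, say "\u{1F1FA}\u{1F1F8}\u{1F1EC}" (12 bytes), the reverse DFA's
// initial guess `i` covers the last two indicators (8 bytes). The trailing
// run holds three indicators in total, an odd count, so `i` is moved forward
// by 4 bytes and the last grapheme is the single final indicator. That
// matches the forward pass, where the first two indicators pair up as a flag.
//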
Otherwise, // we can only take one of them. // // FIXME: This is quadratic in the worst case, e.g., a string of just // regional indicator codepoints. A fix probably requires refactoring this // code a bit such that we don't rescan regional indicators. let mut count = 0; while let Some(start) = REGIONAL_INDICATOR_REV.rfind(bs) { bs = &bs[..start]; count += 1; } if count % 2 == 0 { i } else { i + 4 } } #[cfg(test)] mod tests { use ucd_parse::GraphemeClusterBreakTest; use super::*; use crate::ext_slice::ByteSlice; use crate::tests::LOSSY_TESTS; #[test] fn forward_ucd() { for (i, test) in ucdtests().into_iter().enumerate() { let given = test.grapheme_clusters.concat(); let got: Vec = Graphemes::new(given.as_bytes()) .map(|cluster| cluster.to_string()) .collect(); assert_eq!( test.grapheme_clusters, got, "\ngrapheme forward break test {} failed:\n\ given: {:?}\n\ expected: {:?}\n\ got: {:?}\n", i, uniescape(&given), uniescape_vec(&test.grapheme_clusters), uniescape_vec(&got), ); } } #[test] fn reverse_ucd() { for (i, test) in ucdtests().into_iter().enumerate() { let given = test.grapheme_clusters.concat(); let mut got: Vec = Graphemes::new(given.as_bytes()) .rev() .map(|cluster| cluster.to_string()) .collect(); got.reverse(); assert_eq!( test.grapheme_clusters, got, "\n\ngrapheme reverse break test {} failed:\n\ given: {:?}\n\ expected: {:?}\n\ got: {:?}\n", i, uniescape(&given), uniescape_vec(&test.grapheme_clusters), uniescape_vec(&got), ); } } #[test] fn forward_lossy() { for &(expected, input) in LOSSY_TESTS { let got = Graphemes::new(input.as_bytes()).collect::(); assert_eq!(expected, got); } } #[test] fn reverse_lossy() { for &(expected, input) in LOSSY_TESTS { let expected: String = expected.chars().rev().collect(); let got = Graphemes::new(input.as_bytes()).rev().collect::(); assert_eq!(expected, got); } } fn uniescape(s: &str) -> String { s.chars().flat_map(|c| c.escape_unicode()).collect::() } fn uniescape_vec(strs: &[String]) -> Vec { strs.iter().map(|s| uniescape(s)).collect() } /// Return all of the UCD for grapheme breaks. fn ucdtests() -> Vec { const TESTDATA: &'static str = include_str!("data/GraphemeBreakTest.txt"); let mut tests = vec![]; for mut line in TESTDATA.lines() { line = line.trim(); if line.starts_with("#") || line.contains("surrogate") { continue; } tests.push(line.parse().unwrap()); } tests } } bstr-0.2.17/src/unicode/mod.rs000064400000000000000000000005320072674642500142570ustar 00000000000000pub use self::grapheme::{decode_grapheme, GraphemeIndices, Graphemes}; pub use self::sentence::{SentenceIndices, Sentences}; pub use self::whitespace::{whitespace_len_fwd, whitespace_len_rev}; pub use self::word::{ WordIndices, Words, WordsWithBreakIndices, WordsWithBreaks, }; mod fsm; mod grapheme; mod sentence; mod whitespace; mod word; bstr-0.2.17/src/unicode/sentence.rs000064400000000000000000000155660072674642500153210ustar 00000000000000use regex_automata::DFA; use crate::ext_slice::ByteSlice; use crate::unicode::fsm::sentence_break_fwd::SENTENCE_BREAK_FWD; use crate::utf8; /// An iterator over sentences in a byte string. /// /// This iterator is typically constructed by /// [`ByteSlice::sentences`](trait.ByteSlice.html#method.sentences). /// /// Sentences typically include their trailing punctuation and whitespace. /// /// Since sentences are made up of one or more codepoints, this iterator yields /// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints /// are [substituted](index.html#handling-of-invalid-utf-8). 
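//
// Illustrative sketch, not part of the upstream sources, of the behaviour
// described above (sentences keep their trailing punctuation and whitespace):
//
//     use bstr::ByteSlice;
//
//     let sents: Vec<&str> =
//         b"I want this. Not that. Right now.".sentences().collect();
//     assert_eq!(vec!["I want this. ", "Not that. ", "Right now."], sents);
//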
/// /// This iterator yields words in accordance with the default sentence boundary /// rules specified in /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Sentence_Boundaries). #[derive(Clone, Debug)] pub struct Sentences<'a> { bs: &'a [u8], } impl<'a> Sentences<'a> { pub(crate) fn new(bs: &'a [u8]) -> Sentences<'a> { Sentences { bs } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"I want this. Not that. Right now.".sentences(); /// /// assert_eq!(&b"I want this. Not that. Right now."[..], it.as_bytes()); /// it.next(); /// assert_eq!(b"Not that. Right now.", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for Sentences<'a> { type Item = &'a str; #[inline] fn next(&mut self) -> Option<&'a str> { let (sentence, size) = decode_sentence(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; Some(sentence) } } /// An iterator over sentences in a byte string, along with their byte offsets. /// /// This iterator is typically constructed by /// [`ByteSlice::sentence_indices`](trait.ByteSlice.html#method.sentence_indices). /// /// Sentences typically include their trailing punctuation and whitespace. /// /// Since sentences are made up of one or more codepoints, this iterator /// yields `&str` elements (along with their start and end byte offsets). /// When invalid UTF-8 is encountered, replacement codepoints are /// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the /// indices yielded by this iterator may not correspond to the length of the /// sentence yielded with those indices. For example, when this iterator /// encounters `\xFF` in the byte string, then it will yield a pair of indices /// ranging over a single byte, but will provide an `&str` equivalent to /// `"\u{FFFD}"`, which is three bytes in length. However, when given only /// valid UTF-8, then all indices are in exact correspondence with their paired /// word. /// /// This iterator yields words in accordance with the default sentence boundary /// rules specified in /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Sentence_Boundaries). #[derive(Clone, Debug)] pub struct SentenceIndices<'a> { bs: &'a [u8], forward_index: usize, } impl<'a> SentenceIndices<'a> { pub(crate) fn new(bs: &'a [u8]) -> SentenceIndices<'a> { SentenceIndices { bs: bs, forward_index: 0 } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"I want this. Not that. Right now.".sentence_indices(); /// /// assert_eq!(&b"I want this. Not that. Right now."[..], it.as_bytes()); /// it.next(); /// assert_eq!(b"Not that. 
Right now.", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for SentenceIndices<'a> { type Item = (usize, usize, &'a str); #[inline] fn next(&mut self) -> Option<(usize, usize, &'a str)> { let index = self.forward_index; let (word, size) = decode_sentence(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; self.forward_index += size; Some((index, index + size, word)) } } fn decode_sentence(bs: &[u8]) -> (&str, usize) { if bs.is_empty() { ("", 0) } else if let Some(end) = SENTENCE_BREAK_FWD.find(bs) { // Safe because a match can only occur for valid UTF-8. let sentence = unsafe { bs[..end].to_str_unchecked() }; (sentence, sentence.len()) } else { const INVALID: &'static str = "\u{FFFD}"; // No match on non-empty bytes implies we found invalid UTF-8. let (_, size) = utf8::decode_lossy(bs); (INVALID, size) } } #[cfg(test)] mod tests { use ucd_parse::SentenceBreakTest; use crate::ext_slice::ByteSlice; #[test] fn forward_ucd() { for (i, test) in ucdtests().into_iter().enumerate() { let given = test.sentences.concat(); let got = sentences(given.as_bytes()); assert_eq!( test.sentences, got, "\n\nsentence forward break test {} failed:\n\ given: {:?}\n\ expected: {:?}\n\ got: {:?}\n", i, given, strs_to_bstrs(&test.sentences), strs_to_bstrs(&got), ); } } // Some additional tests that don't seem to be covered by the UCD tests. #[test] fn forward_additional() { assert_eq!(vec!["a.. ", "A"], sentences(b"a.. A")); assert_eq!(vec!["a.. a"], sentences(b"a.. a")); assert_eq!(vec!["a... ", "A"], sentences(b"a... A")); assert_eq!(vec!["a... a"], sentences(b"a... a")); assert_eq!(vec!["a...,..., a"], sentences(b"a...,..., a")); } fn sentences(bytes: &[u8]) -> Vec<&str> { bytes.sentences().collect() } fn strs_to_bstrs>(strs: &[S]) -> Vec<&[u8]> { strs.iter().map(|s| s.as_ref().as_bytes()).collect() } /// Return all of the UCD for sentence breaks. fn ucdtests() -> Vec { const TESTDATA: &'static str = include_str!("data/SentenceBreakTest.txt"); let mut tests = vec![]; for mut line in TESTDATA.lines() { line = line.trim(); if line.starts_with("#") || line.contains("surrogate") { continue; } tests.push(line.parse().unwrap()); } tests } } bstr-0.2.17/src/unicode/whitespace.rs000064400000000000000000000010120072674642500156260ustar 00000000000000use regex_automata::DFA; use crate::unicode::fsm::whitespace_anchored_fwd::WHITESPACE_ANCHORED_FWD; use crate::unicode::fsm::whitespace_anchored_rev::WHITESPACE_ANCHORED_REV; /// Return the first position of a non-whitespace character. pub fn whitespace_len_fwd(slice: &[u8]) -> usize { WHITESPACE_ANCHORED_FWD.find(slice).unwrap_or(0) } /// Return the last position of a non-whitespace character. pub fn whitespace_len_rev(slice: &[u8]) -> usize { WHITESPACE_ANCHORED_REV.rfind(slice).unwrap_or(slice.len()) } bstr-0.2.17/src/unicode/word.rs000064400000000000000000000333100072674642500144530ustar 00000000000000use regex_automata::DFA; use crate::ext_slice::ByteSlice; use crate::unicode::fsm::simple_word_fwd::SIMPLE_WORD_FWD; use crate::unicode::fsm::word_break_fwd::WORD_BREAK_FWD; use crate::utf8; /// An iterator over words in a byte string. /// /// This iterator is typically constructed by /// [`ByteSlice::words`](trait.ByteSlice.html#method.words). /// /// This is similar to the [`WordsWithBreaks`](struct.WordsWithBreaks.html) /// iterator, except it only returns elements that contain a "word" character. 
/// A word character is defined by UTS #18 (Annex C) to be the combination /// of the `Alphabetic` and `Join_Control` properties, along with the /// `Decimal_Number`, `Mark` and `Connector_Punctuation` general categories. /// /// Since words are made up of one or more codepoints, this iterator yields /// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints /// are [substituted](index.html#handling-of-invalid-utf-8). /// /// This iterator yields words in accordance with the default word boundary /// rules specified in /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). /// In particular, this may not be suitable for Japanese and Chinese scripts /// that do not use spaces between words. #[derive(Clone, Debug)] pub struct Words<'a>(WordsWithBreaks<'a>); impl<'a> Words<'a> { pub(crate) fn new(bs: &'a [u8]) -> Words<'a> { Words(WordsWithBreaks::new(bs)) } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"foo bar baz".words(); /// /// assert_eq!(b"foo bar baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b" baz", it.as_bytes()); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.0.as_bytes() } } impl<'a> Iterator for Words<'a> { type Item = &'a str; #[inline] fn next(&mut self) -> Option<&'a str> { while let Some(word) = self.0.next() { if SIMPLE_WORD_FWD.is_match(word.as_bytes()) { return Some(word); } } None } } /// An iterator over words in a byte string and their byte index positions. /// /// This iterator is typically constructed by /// [`ByteSlice::word_indices`](trait.ByteSlice.html#method.word_indices). /// /// This is similar to the /// [`WordsWithBreakIndices`](struct.WordsWithBreakIndices.html) iterator, /// except it only returns elements that contain a "word" character. A /// word character is defined by UTS #18 (Annex C) to be the combination /// of the `Alphabetic` and `Join_Control` properties, along with the /// `Decimal_Number`, `Mark` and `Connector_Punctuation` general categories. /// /// Since words are made up of one or more codepoints, this iterator /// yields `&str` elements (along with their start and end byte offsets). /// When invalid UTF-8 is encountered, replacement codepoints are /// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the /// indices yielded by this iterator may not correspond to the length of the /// word yielded with those indices. For example, when this iterator encounters /// `\xFF` in the byte string, then it will yield a pair of indices ranging /// over a single byte, but will provide an `&str` equivalent to `"\u{FFFD}"`, /// which is three bytes in length. However, when given only valid UTF-8, then /// all indices are in exact correspondence with their paired word. /// /// This iterator yields words in accordance with the default word boundary /// rules specified in /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). /// In particular, this may not be suitable for Japanese and Chinese scripts /// that do not use spaces between words. 
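//
// Illustrative sketch, not part of the upstream sources: for valid UTF-8 the
// offsets yielded by `word_indices` line up exactly with the yielded words,
// and non-word content such as the space below is skipped entirely:
//
//     use bstr::ByteSlice;
//
//     let wis: Vec<(usize, usize, &str)> =
//         b"foo bar".word_indices().collect();
//     assert_eq!(vec![(0, 3, "foo"), (4, 7, "bar")], wis);
//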
#[derive(Clone, Debug)] pub struct WordIndices<'a>(WordsWithBreakIndices<'a>); impl<'a> WordIndices<'a> { pub(crate) fn new(bs: &'a [u8]) -> WordIndices<'a> { WordIndices(WordsWithBreakIndices::new(bs)) } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"foo bar baz".word_indices(); /// /// assert_eq!(b"foo bar baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b" baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.0.as_bytes() } } impl<'a> Iterator for WordIndices<'a> { type Item = (usize, usize, &'a str); #[inline] fn next(&mut self) -> Option<(usize, usize, &'a str)> { while let Some((start, end, word)) = self.0.next() { if SIMPLE_WORD_FWD.is_match(word.as_bytes()) { return Some((start, end, word)); } } None } } /// An iterator over all word breaks in a byte string. /// /// This iterator is typically constructed by /// [`ByteSlice::words_with_breaks`](trait.ByteSlice.html#method.words_with_breaks). /// /// This iterator yields not only all words, but the content that comes between /// words. In particular, if all elements yielded by this iterator are /// concatenated, then the result is the original string (subject to Unicode /// replacement codepoint substitutions). /// /// Since words are made up of one or more codepoints, this iterator yields /// `&str` elements. When invalid UTF-8 is encountered, replacement codepoints /// are [substituted](index.html#handling-of-invalid-utf-8). /// /// This iterator yields words in accordance with the default word boundary /// rules specified in /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). /// In particular, this may not be suitable for Japanese and Chinese scripts /// that do not use spaces between words. #[derive(Clone, Debug)] pub struct WordsWithBreaks<'a> { bs: &'a [u8], } impl<'a> WordsWithBreaks<'a> { pub(crate) fn new(bs: &'a [u8]) -> WordsWithBreaks<'a> { WordsWithBreaks { bs } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"foo bar baz".words_with_breaks(); /// /// assert_eq!(b"foo bar baz", it.as_bytes()); /// it.next(); /// assert_eq!(b" bar baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b" baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for WordsWithBreaks<'a> { type Item = &'a str; #[inline] fn next(&mut self) -> Option<&'a str> { let (word, size) = decode_word(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; Some(word) } } /// An iterator over all word breaks in a byte string, along with their byte /// index positions. /// /// This iterator is typically constructed by /// [`ByteSlice::words_with_break_indices`](trait.ByteSlice.html#method.words_with_break_indices). /// /// This iterator yields not only all words, but the content that comes between /// words. 
In particular, if all elements yielded by this iterator are /// concatenated, then the result is the original string (subject to Unicode /// replacement codepoint substitutions). /// /// Since words are made up of one or more codepoints, this iterator /// yields `&str` elements (along with their start and end byte offsets). /// When invalid UTF-8 is encountered, replacement codepoints are /// [substituted](index.html#handling-of-invalid-utf-8). Because of this, the /// indices yielded by this iterator may not correspond to the length of the /// word yielded with those indices. For example, when this iterator encounters /// `\xFF` in the byte string, then it will yield a pair of indices ranging /// over a single byte, but will provide an `&str` equivalent to `"\u{FFFD}"`, /// which is three bytes in length. However, when given only valid UTF-8, then /// all indices are in exact correspondence with their paired word. /// /// This iterator yields words in accordance with the default word boundary /// rules specified in /// [UAX #29](https://www.unicode.org/reports/tr29/tr29-33.html#Word_Boundaries). /// In particular, this may not be suitable for Japanese and Chinese scripts /// that do not use spaces between words. #[derive(Clone, Debug)] pub struct WordsWithBreakIndices<'a> { bs: &'a [u8], forward_index: usize, } impl<'a> WordsWithBreakIndices<'a> { pub(crate) fn new(bs: &'a [u8]) -> WordsWithBreakIndices<'a> { WordsWithBreakIndices { bs: bs, forward_index: 0 } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"foo bar baz".words_with_break_indices(); /// /// assert_eq!(b"foo bar baz", it.as_bytes()); /// it.next(); /// assert_eq!(b" bar baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b" baz", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for WordsWithBreakIndices<'a> { type Item = (usize, usize, &'a str); #[inline] fn next(&mut self) -> Option<(usize, usize, &'a str)> { let index = self.forward_index; let (word, size) = decode_word(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; self.forward_index += size; Some((index, index + size, word)) } } fn decode_word(bs: &[u8]) -> (&str, usize) { if bs.is_empty() { ("", 0) } else if let Some(end) = WORD_BREAK_FWD.find(bs) { // Safe because a match can only occur for valid UTF-8. let word = unsafe { bs[..end].to_str_unchecked() }; (word, word.len()) } else { const INVALID: &'static str = "\u{FFFD}"; // No match on non-empty bytes implies we found invalid UTF-8. let (_, size) = utf8::decode_lossy(bs); (INVALID, size) } } #[cfg(test)] mod tests { use ucd_parse::WordBreakTest; use crate::ext_slice::ByteSlice; #[test] fn forward_ucd() { for (i, test) in ucdtests().into_iter().enumerate() { let given = test.words.concat(); let got = words(given.as_bytes()); assert_eq!( test.words, got, "\n\nword forward break test {} failed:\n\ given: {:?}\n\ expected: {:?}\n\ got: {:?}\n", i, given, strs_to_bstrs(&test.words), strs_to_bstrs(&got), ); } } // Some additional tests that don't seem to be covered by the UCD tests. // // It's pretty amazing that the UCD tests miss these cases. 
I only found // them by running this crate's segmenter and ICU's segmenter on the same // text and comparing the output. #[test] fn forward_additional() { assert_eq!(vec!["a", ".", " ", "Y"], words(b"a. Y")); assert_eq!(vec!["r", ".", " ", "Yo"], words(b"r. Yo")); assert_eq!( vec!["whatsoever", ".", " ", "You", " ", "may"], words(b"whatsoever. You may") ); assert_eq!( vec!["21stcentury'syesterday"], words(b"21stcentury'syesterday") ); assert_eq!(vec!["Bonta_", "'", "s"], words(b"Bonta_'s")); assert_eq!(vec!["_vhat's"], words(b"_vhat's")); assert_eq!(vec!["__on'anima"], words(b"__on'anima")); assert_eq!(vec!["123_", "'", "4"], words(b"123_'4")); assert_eq!(vec!["_123'4"], words(b"_123'4")); assert_eq!(vec!["__12'345"], words(b"__12'345")); assert_eq!( vec!["tomorrowat4", ":", "00", ","], words(b"tomorrowat4:00,") ); assert_eq!(vec!["RS1", "'", "s"], words(b"RS1's")); assert_eq!(vec!["X38"], words(b"X38")); assert_eq!(vec!["4abc", ":", "00", ","], words(b"4abc:00,")); assert_eq!(vec!["12S", "'", "1"], words(b"12S'1")); assert_eq!(vec!["1XY"], words(b"1XY")); assert_eq!(vec!["\u{FEFF}", "Ты"], words("\u{FEFF}Ты".as_bytes())); } fn words(bytes: &[u8]) -> Vec<&str> { bytes.words_with_breaks().collect() } fn strs_to_bstrs>(strs: &[S]) -> Vec<&[u8]> { strs.iter().map(|s| s.as_ref().as_bytes()).collect() } /// Return all of the UCD for word breaks. fn ucdtests() -> Vec { const TESTDATA: &'static str = include_str!("data/WordBreakTest.txt"); let mut tests = vec![]; for mut line in TESTDATA.lines() { line = line.trim(); if line.starts_with("#") || line.contains("surrogate") { continue; } tests.push(line.parse().unwrap()); } tests } } bstr-0.2.17/src/utf8.rs000064400000000000000000001317500072674642500127470ustar 00000000000000use core::char; use core::cmp; use core::fmt; use core::str; #[cfg(feature = "std")] use std::error; use crate::ascii; use crate::bstr::BStr; use crate::ext_slice::ByteSlice; // The UTF-8 decoder provided here is based on the one presented here: // https://bjoern.hoehrmann.de/utf-8/decoder/dfa/ // // We *could* have done UTF-8 decoding by using a DFA generated by `\p{any}` // using regex-automata that is roughly the same size. The real benefit of // Hoehrmann's formulation is that the byte class mapping below is manually // tailored such that each byte's class doubles as a shift to mask out the // bits necessary for constructing the leading bits of each codepoint value // from the initial byte. // // There are some minor differences between this implementation and Hoehrmann's // formulation. // // Firstly, we make REJECT have state ID 0, since it makes the state table // itself a little easier to read and is consistent with the notion that 0 // means "false" or "bad." // // Secondly, when doing bulk decoding, we add a SIMD accelerated ASCII fast // path. // // Thirdly, we pre-multiply the state IDs to avoid a multiplication instruction // in the core decoding loop. (Which is what regex-automata would do by // default.) // // Fourthly, we split the byte class mapping and transition table into two // arrays because it's clearer. // // It is unlikely that this is the fastest way to do UTF-8 decoding, however, // it is fairly simple. const ACCEPT: usize = 12; const REJECT: usize = 0; /// SAFETY: The decode below function relies on the correctness of these /// equivalence classes. 
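///
/// As a small supplementary illustration (not part of the original
/// commentary) of how a byte's class doubles as a shift: the leading byte
/// `0xE2` of a three byte sequence is assigned class `3` below, and shifting
/// `0xFF` right by that class yields exactly the mask for that byte's
/// payload bits.
///
/// ```
/// assert_eq!(0xFFu32 >> 3, 0b0001_1111);
/// assert_eq!(0xE2u32 & (0xFFu32 >> 3), 0b0000_0010);
/// ```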
#[cfg_attr(rustfmt, rustfmt::skip)] const CLASSES: [u8; 256] = [ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8, ]; /// SAFETY: The decode below function relies on the correctness of this state /// machine. #[cfg_attr(rustfmt, rustfmt::skip)] const STATES_FORWARD: &'static [u8] = &[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 24, 36, 60, 96, 84, 0, 0, 0, 48, 72, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 24, 0, 0, 0, 0, 0, 24, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 0, 36, 0, 0, 0, 36, 0, 0, 0, 0, 0, 36, 0, 36, 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; /// An iterator over Unicode scalar values in a byte string. /// /// When invalid UTF-8 byte sequences are found, they are substituted with the /// Unicode replacement codepoint (`U+FFFD`) using the /// ["maximal subpart" strategy](http://www.unicode.org/review/pr-121.html). /// /// This iterator is created by the /// [`chars`](trait.ByteSlice.html#method.chars) method provided by the /// [`ByteSlice`](trait.ByteSlice.html) extension trait for `&[u8]`. #[derive(Clone, Debug)] pub struct Chars<'a> { bs: &'a [u8], } impl<'a> Chars<'a> { pub(crate) fn new(bs: &'a [u8]) -> Chars<'a> { Chars { bs } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut chars = b"abc".chars(); /// /// assert_eq!(b"abc", chars.as_bytes()); /// chars.next(); /// assert_eq!(b"bc", chars.as_bytes()); /// chars.next(); /// chars.next(); /// assert_eq!(b"", chars.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for Chars<'a> { type Item = char; #[inline] fn next(&mut self) -> Option { let (ch, size) = decode_lossy(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; Some(ch) } } impl<'a> DoubleEndedIterator for Chars<'a> { #[inline] fn next_back(&mut self) -> Option { let (ch, size) = decode_last_lossy(self.bs); if size == 0 { return None; } self.bs = &self.bs[..self.bs.len() - size]; Some(ch) } } /// An iterator over Unicode scalar values in a byte string and their /// byte index positions. /// /// When invalid UTF-8 byte sequences are found, they are substituted with the /// Unicode replacement codepoint (`U+FFFD`) using the /// ["maximal subpart" strategy](http://www.unicode.org/review/pr-121.html). /// /// Note that this is slightly different from the `CharIndices` iterator /// provided by the standard library. Aside from working on possibly invalid /// UTF-8, this iterator provides both the corresponding starting and ending /// byte indices of each codepoint yielded. The ending position is necessary to /// slice the original byte string when invalid UTF-8 bytes are converted into /// a Unicode replacement codepoint, since a single replacement codepoint can /// substitute anywhere from 1 to 3 invalid bytes (inclusive). 
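///
/// For example, a small sketch of that behavior:
///
/// ```
/// use bstr::ByteSlice;
///
/// // The lone \xFF byte is invalid UTF-8, so its range covers exactly one
/// // byte even though the replacement codepoint itself is three bytes long.
/// let mut it = b"a\xFFz".char_indices();
/// assert_eq!(Some((0, 1, 'a')), it.next());
/// assert_eq!(Some((1, 2, '\u{FFFD}')), it.next());
/// assert_eq!(Some((2, 3, 'z')), it.next());
/// assert_eq!(None, it.next());
/// ```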
/// /// This iterator is created by the /// [`char_indices`](trait.ByteSlice.html#method.char_indices) method provided /// by the [`ByteSlice`](trait.ByteSlice.html) extension trait for `&[u8]`. #[derive(Clone, Debug)] pub struct CharIndices<'a> { bs: &'a [u8], forward_index: usize, reverse_index: usize, } impl<'a> CharIndices<'a> { pub(crate) fn new(bs: &'a [u8]) -> CharIndices<'a> { CharIndices { bs: bs, forward_index: 0, reverse_index: bs.len() } } /// View the underlying data as a subslice of the original data. /// /// The slice returned has the same lifetime as the original slice, and so /// the iterator can continue to be used while this exists. /// /// # Examples /// /// ``` /// use bstr::ByteSlice; /// /// let mut it = b"abc".char_indices(); /// /// assert_eq!(b"abc", it.as_bytes()); /// it.next(); /// assert_eq!(b"bc", it.as_bytes()); /// it.next(); /// it.next(); /// assert_eq!(b"", it.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &'a [u8] { self.bs } } impl<'a> Iterator for CharIndices<'a> { type Item = (usize, usize, char); #[inline] fn next(&mut self) -> Option<(usize, usize, char)> { let index = self.forward_index; let (ch, size) = decode_lossy(self.bs); if size == 0 { return None; } self.bs = &self.bs[size..]; self.forward_index += size; Some((index, index + size, ch)) } } impl<'a> DoubleEndedIterator for CharIndices<'a> { #[inline] fn next_back(&mut self) -> Option<(usize, usize, char)> { let (ch, size) = decode_last_lossy(self.bs); if size == 0 { return None; } self.bs = &self.bs[..self.bs.len() - size]; self.reverse_index -= size; Some((self.reverse_index, self.reverse_index + size, ch)) } } impl<'a> ::core::iter::FusedIterator for CharIndices<'a> {} /// An iterator over chunks of valid UTF-8 in a byte slice. /// /// See [`utf8_chunks`](trait.ByteSlice.html#method.utf8_chunks). #[derive(Clone, Debug)] pub struct Utf8Chunks<'a> { pub(super) bytes: &'a [u8], } /// A chunk of valid UTF-8, possibly followed by invalid UTF-8 bytes. /// /// This is yielded by the /// [`Utf8Chunks`](struct.Utf8Chunks.html) /// iterator, which can be created via the /// [`ByteSlice::utf8_chunks`](trait.ByteSlice.html#method.utf8_chunks) /// method. /// /// The `'a` lifetime parameter corresponds to the lifetime of the bytes that /// are being iterated over. #[cfg_attr(test, derive(Debug, PartialEq))] pub struct Utf8Chunk<'a> { /// A valid UTF-8 piece, at the start, end, or between invalid UTF-8 bytes. /// /// This is empty between adjacent invalid UTF-8 byte sequences. valid: &'a str, /// A sequence of invalid UTF-8 bytes. /// /// Can only be empty in the last chunk. /// /// Should be replaced by a single unicode replacement character, if not /// empty. invalid: &'a BStr, /// Indicates whether the invalid sequence could've been valid if there /// were more bytes. /// /// Can only be true in the last chunk. incomplete: bool, } impl<'a> Utf8Chunk<'a> { /// Returns the (possibly empty) valid UTF-8 bytes in this chunk. /// /// This may be empty if there are consecutive sequences of invalid UTF-8 /// bytes. #[inline] pub fn valid(&self) -> &'a str { self.valid } /// Returns the (possibly empty) invalid UTF-8 bytes in this chunk that /// immediately follow the valid UTF-8 bytes in this chunk. /// /// This is only empty when this chunk corresponds to the last chunk in /// the original bytes. /// /// The maximum length of this slice is 3. That is, invalid UTF-8 byte /// sequences greater than 1 always correspond to a valid _prefix_ of /// a valid UTF-8 encoded codepoint. 
This corresponds to the "substitution /// of maximal subparts" strategy that is described in more detail in the /// docs for the /// [`ByteSlice::to_str_lossy`](trait.ByteSlice.html#method.to_str_lossy) /// method. #[inline] pub fn invalid(&self) -> &'a [u8] { self.invalid.as_bytes() } /// Returns whether the invalid sequence might still become valid if more /// bytes are added. /// /// Returns true if the end of the input was reached unexpectedly, /// without encountering an unexpected byte. /// /// This can only be the case for the last chunk. #[inline] pub fn incomplete(&self) -> bool { self.incomplete } } impl<'a> Iterator for Utf8Chunks<'a> { type Item = Utf8Chunk<'a>; #[inline] fn next(&mut self) -> Option> { if self.bytes.is_empty() { return None; } match validate(self.bytes) { Ok(()) => { let valid = self.bytes; self.bytes = &[]; Some(Utf8Chunk { // SAFETY: This is safe because of the guarantees provided // by utf8::validate. valid: unsafe { str::from_utf8_unchecked(valid) }, invalid: [].as_bstr(), incomplete: false, }) } Err(e) => { let (valid, rest) = self.bytes.split_at(e.valid_up_to()); // SAFETY: This is safe because of the guarantees provided by // utf8::validate. let valid = unsafe { str::from_utf8_unchecked(valid) }; let (invalid_len, incomplete) = match e.error_len() { Some(n) => (n, false), None => (rest.len(), true), }; let (invalid, rest) = rest.split_at(invalid_len); self.bytes = rest; Some(Utf8Chunk { valid, invalid: invalid.as_bstr(), incomplete, }) } } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.bytes.is_empty() { (0, Some(0)) } else { (1, Some(self.bytes.len())) } } } impl<'a> ::core::iter::FusedIterator for Utf8Chunks<'a> {} /// An error that occurs when UTF-8 decoding fails. /// /// This error occurs when attempting to convert a non-UTF-8 byte /// string to a Rust string that must be valid UTF-8. For example, /// [`to_str`](trait.ByteSlice.html#method.to_str) is one such method. /// /// # Example /// /// This example shows what happens when a given byte sequence is invalid, /// but ends with a sequence that is a possible prefix of valid UTF-8. /// /// ``` /// use bstr::{B, ByteSlice}; /// /// let s = B(b"foobar\xF1\x80\x80"); /// let err = s.to_str().unwrap_err(); /// assert_eq!(err.valid_up_to(), 6); /// assert_eq!(err.error_len(), None); /// ``` /// /// This example shows what happens when a given byte sequence contains /// invalid UTF-8. /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foobar\xF1\x80\x80quux"; /// let err = s.to_str().unwrap_err(); /// assert_eq!(err.valid_up_to(), 6); /// // The error length reports the maximum number of bytes that correspond to /// // a valid prefix of a UTF-8 encoded codepoint. /// assert_eq!(err.error_len(), Some(3)); /// /// // In contrast to the above which contains a single invalid prefix, /// // consider the case of multiple individal bytes that are never valid /// // prefixes. Note how the value of error_len changes! /// let s = b"foobar\xFF\xFFquux"; /// let err = s.to_str().unwrap_err(); /// assert_eq!(err.valid_up_to(), 6); /// assert_eq!(err.error_len(), Some(1)); /// /// // The fact that it's an invalid prefix does not change error_len even /// // when it immediately precedes the end of the string. 
/// let s = b"foobar\xFF"; /// let err = s.to_str().unwrap_err(); /// assert_eq!(err.valid_up_to(), 6); /// assert_eq!(err.error_len(), Some(1)); /// ``` #[derive(Debug, Eq, PartialEq)] pub struct Utf8Error { valid_up_to: usize, error_len: Option, } impl Utf8Error { /// Returns the byte index of the position immediately following the last /// valid UTF-8 byte. /// /// # Example /// /// This examples shows how `valid_up_to` can be used to retrieve a /// possibly empty prefix that is guaranteed to be valid UTF-8: /// /// ``` /// use bstr::ByteSlice; /// /// let s = b"foobar\xF1\x80\x80quux"; /// let err = s.to_str().unwrap_err(); /// /// // This is guaranteed to never panic. /// let string = s[..err.valid_up_to()].to_str().unwrap(); /// assert_eq!(string, "foobar"); /// ``` #[inline] pub fn valid_up_to(&self) -> usize { self.valid_up_to } /// Returns the total number of invalid UTF-8 bytes immediately following /// the position returned by `valid_up_to`. This value is always at least /// `1`, but can be up to `3` if bytes form a valid prefix of some UTF-8 /// encoded codepoint. /// /// If the end of the original input was found before a valid UTF-8 encoded /// codepoint could be completed, then this returns `None`. This is useful /// when processing streams, where a `None` value signals that more input /// might be needed. #[inline] pub fn error_len(&self) -> Option { self.error_len } } #[cfg(feature = "std")] impl error::Error for Utf8Error { fn description(&self) -> &str { "invalid UTF-8" } } impl fmt::Display for Utf8Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "invalid UTF-8 found at byte offset {}", self.valid_up_to) } } /// Returns OK if and only if the given slice is completely valid UTF-8. /// /// If the slice isn't valid UTF-8, then an error is returned that explains /// the first location at which invalid UTF-8 was detected. pub fn validate(slice: &[u8]) -> Result<(), Utf8Error> { // The fast path for validating UTF-8. It steps through a UTF-8 automaton // and uses a SIMD accelerated ASCII fast path on x86_64. If an error is // detected, it backs up and runs the slower version of the UTF-8 automaton // to determine correct error information. fn fast(slice: &[u8]) -> Result<(), Utf8Error> { let mut state = ACCEPT; let mut i = 0; while i < slice.len() { let b = slice[i]; // ASCII fast path. If we see two consecutive ASCII bytes, then try // to validate as much ASCII as possible very quickly. if state == ACCEPT && b <= 0x7F && slice.get(i + 1).map_or(false, |&b| b <= 0x7F) { i += ascii::first_non_ascii_byte(&slice[i..]); continue; } state = step(state, b); if state == REJECT { return Err(find_valid_up_to(slice, i)); } i += 1; } if state != ACCEPT { Err(find_valid_up_to(slice, slice.len())) } else { Ok(()) } } // Given the first position at which a UTF-8 sequence was determined to be // invalid, return an error that correctly reports the position at which // the last complete UTF-8 sequence ends. #[inline(never)] fn find_valid_up_to(slice: &[u8], rejected_at: usize) -> Utf8Error { // In order to find the last valid byte, we need to back up an amount // that guarantees every preceding byte is part of a valid UTF-8 // code unit sequence. To do this, we simply locate the last leading // byte that occurs before rejected_at. 
let mut backup = rejected_at.saturating_sub(1); while backup > 0 && !is_leading_or_invalid_utf8_byte(slice[backup]) { backup -= 1; } let upto = cmp::min(slice.len(), rejected_at.saturating_add(1)); let mut err = slow(&slice[backup..upto]).unwrap_err(); err.valid_up_to += backup; err } // Like top-level UTF-8 decoding, except it correctly reports a UTF-8 error // when an invalid sequence is found. This is split out from validate so // that the fast path doesn't need to keep track of the position of the // last valid UTF-8 byte. In particular, tracking this requires checking // for an ACCEPT state on each byte, which degrades throughput pretty // badly. fn slow(slice: &[u8]) -> Result<(), Utf8Error> { let mut state = ACCEPT; let mut valid_up_to = 0; for (i, &b) in slice.iter().enumerate() { state = step(state, b); if state == ACCEPT { valid_up_to = i + 1; } else if state == REJECT { // Our error length must always be at least 1. let error_len = Some(cmp::max(1, i - valid_up_to)); return Err(Utf8Error { valid_up_to, error_len }); } } if state != ACCEPT { Err(Utf8Error { valid_up_to, error_len: None }) } else { Ok(()) } } // Advance to the next state given the current state and current byte. fn step(state: usize, b: u8) -> usize { let class = CLASSES[b as usize]; // SAFETY: This is safe because 'class' is always <=11 and 'state' is // always <=96. Therefore, the maximal index is 96+11 = 107, where // STATES_FORWARD.len() = 108 such that every index is guaranteed to be // valid by construction of the state machine and the byte equivalence // classes. unsafe { *STATES_FORWARD.get_unchecked(state + class as usize) as usize } } fast(slice) } /// UTF-8 decode a single Unicode scalar value from the beginning of a slice. /// /// When successful, the corresponding Unicode scalar value is returned along /// with the number of bytes it was encoded with. The number of bytes consumed /// for a successful decode is always between 1 and 4, inclusive. /// /// When unsuccessful, `None` is returned along with the number of bytes that /// make up a maximal prefix of a valid UTF-8 code unit sequence. In this case, /// the number of bytes consumed is always between 0 and 3, inclusive, where /// 0 is only returned when `slice` is empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::decode_utf8; /// /// // Decoding a valid codepoint. /// let (ch, size) = decode_utf8(b"\xE2\x98\x83"); /// assert_eq!(Some('☃'), ch); /// assert_eq!(3, size); /// /// // Decoding an incomplete codepoint. 
/// let (ch, size) = decode_utf8(b"\xE2\x98"); /// assert_eq!(None, ch); /// assert_eq!(2, size); /// ``` /// /// This example shows how to iterate over all codepoints in UTF-8 encoded /// bytes, while replacing invalid UTF-8 sequences with the replacement /// codepoint: /// /// ``` /// use bstr::{B, decode_utf8}; /// /// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); /// let mut chars = vec![]; /// while !bytes.is_empty() { /// let (ch, size) = decode_utf8(bytes); /// bytes = &bytes[size..]; /// chars.push(ch.unwrap_or('\u{FFFD}')); /// } /// assert_eq!(vec!['☃', '\u{FFFD}', '𝞃', '\u{FFFD}', 'a'], chars); /// ``` #[inline] pub fn decode>(slice: B) -> (Option, usize) { let slice = slice.as_ref(); match slice.get(0) { None => return (None, 0), Some(&b) if b <= 0x7F => return (Some(b as char), 1), _ => {} } let (mut state, mut cp, mut i) = (ACCEPT, 0, 0); while i < slice.len() { decode_step(&mut state, &mut cp, slice[i]); i += 1; if state == ACCEPT { // SAFETY: This is safe because `decode_step` guarantees that // `cp` is a valid Unicode scalar value in an ACCEPT state. let ch = unsafe { char::from_u32_unchecked(cp) }; return (Some(ch), i); } else if state == REJECT { // At this point, we always want to advance at least one byte. return (None, cmp::max(1, i.saturating_sub(1))); } } (None, i) } /// Lossily UTF-8 decode a single Unicode scalar value from the beginning of a /// slice. /// /// When successful, the corresponding Unicode scalar value is returned along /// with the number of bytes it was encoded with. The number of bytes consumed /// for a successful decode is always between 1 and 4, inclusive. /// /// When unsuccessful, the Unicode replacement codepoint (`U+FFFD`) is returned /// along with the number of bytes that make up a maximal prefix of a valid /// UTF-8 code unit sequence. In this case, the number of bytes consumed is /// always between 0 and 3, inclusive, where 0 is only returned when `slice` is /// empty. /// /// # Examples /// /// Basic usage: /// /// ```ignore /// use bstr::decode_utf8_lossy; /// /// // Decoding a valid codepoint. /// let (ch, size) = decode_utf8_lossy(b"\xE2\x98\x83"); /// assert_eq!('☃', ch); /// assert_eq!(3, size); /// /// // Decoding an incomplete codepoint. /// let (ch, size) = decode_utf8_lossy(b"\xE2\x98"); /// assert_eq!('\u{FFFD}', ch); /// assert_eq!(2, size); /// ``` /// /// This example shows how to iterate over all codepoints in UTF-8 encoded /// bytes, while replacing invalid UTF-8 sequences with the replacement /// codepoint: /// /// ```ignore /// use bstr::{B, decode_utf8_lossy}; /// /// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); /// let mut chars = vec![]; /// while !bytes.is_empty() { /// let (ch, size) = decode_utf8_lossy(bytes); /// bytes = &bytes[size..]; /// chars.push(ch); /// } /// assert_eq!(vec!['☃', '\u{FFFD}', '𝞃', '\u{FFFD}', 'a'], chars); /// ``` #[inline] pub fn decode_lossy>(slice: B) -> (char, usize) { match decode(slice) { (Some(ch), size) => (ch, size), (None, size) => ('\u{FFFD}', size), } } /// UTF-8 decode a single Unicode scalar value from the end of a slice. /// /// When successful, the corresponding Unicode scalar value is returned along /// with the number of bytes it was encoded with. The number of bytes consumed /// for a successful decode is always between 1 and 4, inclusive. /// /// When unsuccessful, `None` is returned along with the number of bytes that /// make up a maximal prefix of a valid UTF-8 code unit sequence. 
In this case, /// the number of bytes consumed is always between 0 and 3, inclusive, where /// 0 is only returned when `slice` is empty. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bstr::decode_last_utf8; /// /// // Decoding a valid codepoint. /// let (ch, size) = decode_last_utf8(b"\xE2\x98\x83"); /// assert_eq!(Some('☃'), ch); /// assert_eq!(3, size); /// /// // Decoding an incomplete codepoint. /// let (ch, size) = decode_last_utf8(b"\xE2\x98"); /// assert_eq!(None, ch); /// assert_eq!(2, size); /// ``` /// /// This example shows how to iterate over all codepoints in UTF-8 encoded /// bytes in reverse, while replacing invalid UTF-8 sequences with the /// replacement codepoint: /// /// ``` /// use bstr::{B, decode_last_utf8}; /// /// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); /// let mut chars = vec![]; /// while !bytes.is_empty() { /// let (ch, size) = decode_last_utf8(bytes); /// bytes = &bytes[..bytes.len()-size]; /// chars.push(ch.unwrap_or('\u{FFFD}')); /// } /// assert_eq!(vec!['a', '\u{FFFD}', '𝞃', '\u{FFFD}', '☃'], chars); /// ``` #[inline] pub fn decode_last>(slice: B) -> (Option, usize) { // TODO: We could implement this by reversing the UTF-8 automaton, but for // now, we do it the slow way by using the forward automaton. let slice = slice.as_ref(); if slice.is_empty() { return (None, 0); } let mut start = slice.len() - 1; let limit = slice.len().saturating_sub(4); while start > limit && !is_leading_or_invalid_utf8_byte(slice[start]) { start -= 1; } let (ch, size) = decode(&slice[start..]); // If we didn't consume all of the bytes, then that means there's at least // one stray byte that never occurs in a valid code unit prefix, so we can // advance by one byte. if start + size != slice.len() { (None, 1) } else { (ch, size) } } /// Lossily UTF-8 decode a single Unicode scalar value from the end of a slice. /// /// When successful, the corresponding Unicode scalar value is returned along /// with the number of bytes it was encoded with. The number of bytes consumed /// for a successful decode is always between 1 and 4, inclusive. /// /// When unsuccessful, the Unicode replacement codepoint (`U+FFFD`) is returned /// along with the number of bytes that make up a maximal prefix of a valid /// UTF-8 code unit sequence. In this case, the number of bytes consumed is /// always between 0 and 3, inclusive, where 0 is only returned when `slice` is /// empty. /// /// # Examples /// /// Basic usage: /// /// ```ignore /// use bstr::decode_last_utf8_lossy; /// /// // Decoding a valid codepoint. /// let (ch, size) = decode_last_utf8_lossy(b"\xE2\x98\x83"); /// assert_eq!('☃', ch); /// assert_eq!(3, size); /// /// // Decoding an incomplete codepoint. 
/// let (ch, size) = decode_last_utf8_lossy(b"\xE2\x98"); /// assert_eq!('\u{FFFD}', ch); /// assert_eq!(2, size); /// ``` /// /// This example shows how to iterate over all codepoints in UTF-8 encoded /// bytes in reverse, while replacing invalid UTF-8 sequences with the /// replacement codepoint: /// /// ```ignore /// use bstr::decode_last_utf8_lossy; /// /// let mut bytes = B(b"\xE2\x98\x83\xFF\xF0\x9D\x9E\x83\xE2\x98\x61"); /// let mut chars = vec![]; /// while !bytes.is_empty() { /// let (ch, size) = decode_last_utf8_lossy(bytes); /// bytes = &bytes[..bytes.len()-size]; /// chars.push(ch); /// } /// assert_eq!(vec!['a', '\u{FFFD}', '𝞃', '\u{FFFD}', '☃'], chars); /// ``` #[inline] pub fn decode_last_lossy>(slice: B) -> (char, usize) { match decode_last(slice) { (Some(ch), size) => (ch, size), (None, size) => ('\u{FFFD}', size), } } /// SAFETY: The decode function relies on state being equal to ACCEPT only if /// cp is a valid Unicode scalar value. #[inline] pub fn decode_step(state: &mut usize, cp: &mut u32, b: u8) { let class = CLASSES[b as usize]; if *state == ACCEPT { *cp = (0xFF >> class) & (b as u32); } else { *cp = (b as u32 & 0b111111) | (*cp << 6); } *state = STATES_FORWARD[*state + class as usize] as usize; } /// Returns true if and only if the given byte is either a valid leading UTF-8 /// byte, or is otherwise an invalid byte that can never appear anywhere in a /// valid UTF-8 sequence. fn is_leading_or_invalid_utf8_byte(b: u8) -> bool { // In the ASCII case, the most significant bit is never set. The leading // byte of a 2/3/4-byte sequence always has the top two most significant // bits set. For bytes that can never appear anywhere in valid UTF-8, this // also returns true, since every such byte has its two most significant // bits set: // // \xC0 :: 11000000 // \xC1 :: 11000001 // \xF5 :: 11110101 // \xF6 :: 11110110 // \xF7 :: 11110111 // \xF8 :: 11111000 // \xF9 :: 11111001 // \xFA :: 11111010 // \xFB :: 11111011 // \xFC :: 11111100 // \xFD :: 11111101 // \xFE :: 11111110 // \xFF :: 11111111 (b & 0b1100_0000) != 0b1000_0000 } #[cfg(test)] mod tests { use std::char; use crate::ext_slice::{ByteSlice, B}; use crate::tests::LOSSY_TESTS; use crate::utf8::{self, Utf8Error}; fn utf8e(valid_up_to: usize) -> Utf8Error { Utf8Error { valid_up_to, error_len: None } } fn utf8e2(valid_up_to: usize, error_len: usize) -> Utf8Error { Utf8Error { valid_up_to, error_len: Some(error_len) } } #[test] fn validate_all_codepoints() { for i in 0..(0x10FFFF + 1) { let cp = match char::from_u32(i) { None => continue, Some(cp) => cp, }; let mut buf = [0; 4]; let s = cp.encode_utf8(&mut buf); assert_eq!(Ok(()), utf8::validate(s.as_bytes())); } } #[test] fn validate_multiple_codepoints() { assert_eq!(Ok(()), utf8::validate(b"abc")); assert_eq!(Ok(()), utf8::validate(b"a\xE2\x98\x83a")); assert_eq!(Ok(()), utf8::validate(b"a\xF0\x9D\x9C\xB7a")); assert_eq!(Ok(()), utf8::validate(b"\xE2\x98\x83\xF0\x9D\x9C\xB7",)); assert_eq!( Ok(()), utf8::validate(b"a\xE2\x98\x83a\xF0\x9D\x9C\xB7a",) ); assert_eq!( Ok(()), utf8::validate(b"\xEF\xBF\xBD\xE2\x98\x83\xEF\xBF\xBD",) ); } #[test] fn validate_errors() { // single invalid byte assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xFF")); // single invalid byte after ASCII assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xFF")); // single invalid byte after 2 byte sequence assert_eq!(Err(utf8e2(2, 1)), utf8::validate(b"\xCE\xB2\xFF")); // single invalid byte after 3 byte sequence assert_eq!(Err(utf8e2(3, 1)), utf8::validate(b"\xE2\x98\x83\xFF")); // single 
invalid byte after 4 byte sequence assert_eq!(Err(utf8e2(4, 1)), utf8::validate(b"\xF0\x9D\x9D\xB1\xFF")); // An invalid 2-byte sequence with a valid 1-byte prefix. assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xCE\xF0")); // An invalid 3-byte sequence with a valid 2-byte prefix. assert_eq!(Err(utf8e2(0, 2)), utf8::validate(b"\xE2\x98\xF0")); // An invalid 4-byte sequence with a valid 3-byte prefix. assert_eq!(Err(utf8e2(0, 3)), utf8::validate(b"\xF0\x9D\x9D\xF0")); // An overlong sequence. Should be \xE2\x82\xAC, but we encode the // same codepoint value in 4 bytes. This not only tests that we reject // overlong sequences, but that we get valid_up_to correct. assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xF0\x82\x82\xAC")); assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xF0\x82\x82\xAC")); assert_eq!( Err(utf8e2(3, 1)), utf8::validate(b"\xE2\x98\x83\xF0\x82\x82\xAC",) ); // Check that encoding a surrogate codepoint using the UTF-8 scheme // fails validation. assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xED\xA0\x80")); assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xED\xA0\x80")); assert_eq!( Err(utf8e2(3, 1)), utf8::validate(b"\xE2\x98\x83\xED\xA0\x80",) ); // Check that an incomplete 2-byte sequence fails. assert_eq!(Err(utf8e2(0, 1)), utf8::validate(b"\xCEa")); assert_eq!(Err(utf8e2(1, 1)), utf8::validate(b"a\xCEa")); assert_eq!( Err(utf8e2(3, 1)), utf8::validate(b"\xE2\x98\x83\xCE\xE2\x98\x83",) ); // Check that an incomplete 3-byte sequence fails. assert_eq!(Err(utf8e2(0, 2)), utf8::validate(b"\xE2\x98a")); assert_eq!(Err(utf8e2(1, 2)), utf8::validate(b"a\xE2\x98a")); assert_eq!( Err(utf8e2(3, 2)), utf8::validate(b"\xE2\x98\x83\xE2\x98\xE2\x98\x83",) ); // Check that an incomplete 4-byte sequence fails. assert_eq!(Err(utf8e2(0, 3)), utf8::validate(b"\xF0\x9D\x9Ca")); assert_eq!(Err(utf8e2(1, 3)), utf8::validate(b"a\xF0\x9D\x9Ca")); assert_eq!( Err(utf8e2(4, 3)), utf8::validate(b"\xF0\x9D\x9C\xB1\xF0\x9D\x9C\xE2\x98\x83",) ); assert_eq!( Err(utf8e2(6, 3)), utf8::validate(b"foobar\xF1\x80\x80quux",) ); // Check that an incomplete (EOF) 2-byte sequence fails. assert_eq!(Err(utf8e(0)), utf8::validate(b"\xCE")); assert_eq!(Err(utf8e(1)), utf8::validate(b"a\xCE")); assert_eq!(Err(utf8e(3)), utf8::validate(b"\xE2\x98\x83\xCE")); // Check that an incomplete (EOF) 3-byte sequence fails. assert_eq!(Err(utf8e(0)), utf8::validate(b"\xE2\x98")); assert_eq!(Err(utf8e(1)), utf8::validate(b"a\xE2\x98")); assert_eq!(Err(utf8e(3)), utf8::validate(b"\xE2\x98\x83\xE2\x98")); // Check that an incomplete (EOF) 4-byte sequence fails. assert_eq!(Err(utf8e(0)), utf8::validate(b"\xF0\x9D\x9C")); assert_eq!(Err(utf8e(1)), utf8::validate(b"a\xF0\x9D\x9C")); assert_eq!( Err(utf8e(4)), utf8::validate(b"\xF0\x9D\x9C\xB1\xF0\x9D\x9C",) ); // Test that we errors correct even after long valid sequences. This // checks that our "backup" logic for detecting errors is correct. 
assert_eq!( Err(utf8e2(8, 1)), utf8::validate(b"\xe2\x98\x83\xce\xb2\xe3\x83\x84\xFF",) ); } #[test] fn decode_valid() { fn d(mut s: &str) -> Vec { let mut chars = vec![]; while !s.is_empty() { let (ch, size) = utf8::decode(s.as_bytes()); s = &s[size..]; chars.push(ch.unwrap()); } chars } assert_eq!(vec!['☃'], d("☃")); assert_eq!(vec!['☃', '☃'], d("☃☃")); assert_eq!(vec!['α', 'β', 'γ', 'δ', 'ε'], d("αβγδε")); assert_eq!(vec!['☃', '⛄', '⛇'], d("☃⛄⛇")); assert_eq!(vec!['𝗮', '𝗯', '𝗰', '𝗱', '𝗲'], d("𝗮𝗯𝗰𝗱𝗲")); } #[test] fn decode_invalid() { let (ch, size) = utf8::decode(b""); assert_eq!(None, ch); assert_eq!(0, size); let (ch, size) = utf8::decode(b"\xFF"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode(b"\xCE\xF0"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode(b"\xE2\x98\xF0"); assert_eq!(None, ch); assert_eq!(2, size); let (ch, size) = utf8::decode(b"\xF0\x9D\x9D"); assert_eq!(None, ch); assert_eq!(3, size); let (ch, size) = utf8::decode(b"\xF0\x9D\x9D\xF0"); assert_eq!(None, ch); assert_eq!(3, size); let (ch, size) = utf8::decode(b"\xF0\x82\x82\xAC"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode(b"\xED\xA0\x80"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode(b"\xCEa"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode(b"\xE2\x98a"); assert_eq!(None, ch); assert_eq!(2, size); let (ch, size) = utf8::decode(b"\xF0\x9D\x9Ca"); assert_eq!(None, ch); assert_eq!(3, size); } #[test] fn decode_lossy() { let (ch, size) = utf8::decode_lossy(b""); assert_eq!('\u{FFFD}', ch); assert_eq!(0, size); let (ch, size) = utf8::decode_lossy(b"\xFF"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_lossy(b"\xCE\xF0"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_lossy(b"\xE2\x98\xF0"); assert_eq!('\u{FFFD}', ch); assert_eq!(2, size); let (ch, size) = utf8::decode_lossy(b"\xF0\x9D\x9D\xF0"); assert_eq!('\u{FFFD}', ch); assert_eq!(3, size); let (ch, size) = utf8::decode_lossy(b"\xF0\x82\x82\xAC"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_lossy(b"\xED\xA0\x80"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_lossy(b"\xCEa"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_lossy(b"\xE2\x98a"); assert_eq!('\u{FFFD}', ch); assert_eq!(2, size); let (ch, size) = utf8::decode_lossy(b"\xF0\x9D\x9Ca"); assert_eq!('\u{FFFD}', ch); assert_eq!(3, size); } #[test] fn decode_last_valid() { fn d(mut s: &str) -> Vec { let mut chars = vec![]; while !s.is_empty() { let (ch, size) = utf8::decode_last(s.as_bytes()); s = &s[..s.len() - size]; chars.push(ch.unwrap()); } chars } assert_eq!(vec!['☃'], d("☃")); assert_eq!(vec!['☃', '☃'], d("☃☃")); assert_eq!(vec!['ε', 'δ', 'γ', 'β', 'α'], d("αβγδε")); assert_eq!(vec!['⛇', '⛄', '☃'], d("☃⛄⛇")); assert_eq!(vec!['𝗲', '𝗱', '𝗰', '𝗯', '𝗮'], d("𝗮𝗯𝗰𝗱𝗲")); } #[test] fn decode_last_invalid() { let (ch, size) = utf8::decode_last(b""); assert_eq!(None, ch); assert_eq!(0, size); let (ch, size) = utf8::decode_last(b"\xFF"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xCE\xF0"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xCE"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xE2\x98\xF0"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xE2\x98"); assert_eq!(None, ch); 
assert_eq!(2, size); let (ch, size) = utf8::decode_last(b"\xF0\x9D\x9D\xF0"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xF0\x9D\x9D"); assert_eq!(None, ch); assert_eq!(3, size); let (ch, size) = utf8::decode_last(b"\xF0\x82\x82\xAC"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xED\xA0\x80"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xED\xA0"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"\xED"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"a\xCE"); assert_eq!(None, ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last(b"a\xE2\x98"); assert_eq!(None, ch); assert_eq!(2, size); let (ch, size) = utf8::decode_last(b"a\xF0\x9D\x9C"); assert_eq!(None, ch); assert_eq!(3, size); } #[test] fn decode_last_lossy() { let (ch, size) = utf8::decode_last_lossy(b""); assert_eq!('\u{FFFD}', ch); assert_eq!(0, size); let (ch, size) = utf8::decode_last_lossy(b"\xFF"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xCE\xF0"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xCE"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xE2\x98\xF0"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xE2\x98"); assert_eq!('\u{FFFD}', ch); assert_eq!(2, size); let (ch, size) = utf8::decode_last_lossy(b"\xF0\x9D\x9D\xF0"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xF0\x9D\x9D"); assert_eq!('\u{FFFD}', ch); assert_eq!(3, size); let (ch, size) = utf8::decode_last_lossy(b"\xF0\x82\x82\xAC"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xED\xA0\x80"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xED\xA0"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"\xED"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"a\xCE"); assert_eq!('\u{FFFD}', ch); assert_eq!(1, size); let (ch, size) = utf8::decode_last_lossy(b"a\xE2\x98"); assert_eq!('\u{FFFD}', ch); assert_eq!(2, size); let (ch, size) = utf8::decode_last_lossy(b"a\xF0\x9D\x9C"); assert_eq!('\u{FFFD}', ch); assert_eq!(3, size); } #[test] fn chars() { for (i, &(expected, input)) in LOSSY_TESTS.iter().enumerate() { let got: String = B(input).chars().collect(); assert_eq!( expected, got, "chars(ith: {:?}, given: {:?})", i, input, ); let got: String = B(input).char_indices().map(|(_, _, ch)| ch).collect(); assert_eq!( expected, got, "char_indices(ith: {:?}, given: {:?})", i, input, ); let expected: String = expected.chars().rev().collect(); let got: String = B(input).chars().rev().collect(); assert_eq!( expected, got, "chars.rev(ith: {:?}, given: {:?})", i, input, ); let got: String = B(input).char_indices().rev().map(|(_, _, ch)| ch).collect(); assert_eq!( expected, got, "char_indices.rev(ith: {:?}, given: {:?})", i, input, ); } } #[test] fn utf8_chunks() { let mut c = utf8::Utf8Chunks { bytes: b"123\xC0" }; assert_eq!( (c.next(), c.next()), ( Some(utf8::Utf8Chunk { valid: "123", invalid: b"\xC0".as_bstr(), incomplete: false, }), None, ) ); let mut c = utf8::Utf8Chunks { bytes: b"123\xFF\xFF" }; assert_eq!( (c.next(), c.next(), c.next()), ( Some(utf8::Utf8Chunk { valid: "123", invalid: 
b"\xFF".as_bstr(), incomplete: false, }), Some(utf8::Utf8Chunk { valid: "", invalid: b"\xFF".as_bstr(), incomplete: false, }), None, ) ); let mut c = utf8::Utf8Chunks { bytes: b"123\xD0" }; assert_eq!( (c.next(), c.next()), ( Some(utf8::Utf8Chunk { valid: "123", invalid: b"\xD0".as_bstr(), incomplete: true, }), None, ) ); let mut c = utf8::Utf8Chunks { bytes: b"123\xD0456" }; assert_eq!( (c.next(), c.next(), c.next()), ( Some(utf8::Utf8Chunk { valid: "123", invalid: b"\xD0".as_bstr(), incomplete: false, }), Some(utf8::Utf8Chunk { valid: "456", invalid: b"".as_bstr(), incomplete: false, }), None, ) ); let mut c = utf8::Utf8Chunks { bytes: b"123\xE2\x98" }; assert_eq!( (c.next(), c.next()), ( Some(utf8::Utf8Chunk { valid: "123", invalid: b"\xE2\x98".as_bstr(), incomplete: true, }), None, ) ); let mut c = utf8::Utf8Chunks { bytes: b"123\xF4\x8F\xBF" }; assert_eq!( (c.next(), c.next()), ( Some(utf8::Utf8Chunk { valid: "123", invalid: b"\xF4\x8F\xBF".as_bstr(), incomplete: true, }), None, ) ); } }