7 changes: 0 additions & 7 deletions Cargo-minimal.lock
@@ -170,7 +170,6 @@ dependencies = [
"tiny_http",
"tokio",
"tokio-rustls",
"urlencoding",
"webpki-roots",
]

@@ -850,12 +849,6 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"

[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
7 changes: 0 additions & 7 deletions Cargo-recent.lock
@@ -170,7 +170,6 @@ dependencies = [
"tiny_http",
"tokio",
"tokio-rustls",
"urlencoding",
"webpki-roots",
]

@@ -883,12 +882,6 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"

[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
2 changes: 0 additions & 2 deletions bitreq/Cargo.toml
@@ -16,8 +16,6 @@ rust-version = "1.75.0"
maintenance = { status = "experimental" }

[dependencies]
# For the urlencoding feature:
urlencoding = { version = "2.1.0", optional = true }
# For the punycode feature:
punycode = { version = "0.4.1", optional = true }
# For the json-using-serde feature:
69 changes: 40 additions & 29 deletions bitreq/src/http_url.rs
@@ -1,14 +1,18 @@
#[cfg(feature = "std")]
use core::fmt::{self, Write};

#[cfg(feature = "std")]
use crate::Error;

#[cfg(feature = "std")]
#[derive(Clone, Copy, PartialEq)]
pub(crate) enum Port {
ImplicitHttp,
ImplicitHttps,
Explicit(u32),
}

#[cfg(feature = "std")]
impl Port {
pub(crate) fn port(self) -> u32 {
match self {
@@ -27,6 +31,7 @@ impl Port {
/// ```text
/// scheme "://" host [ ":" port ] path [ "?" query ] [ "#" fragment ]
/// ```
#[cfg(feature = "std")]
#[derive(Clone, PartialEq)]
pub(crate) struct HttpUrl {
/// If scheme is "https", true, if "http", false.
@@ -41,6 +46,7 @@ pub(crate) struct HttpUrl {
pub(crate) fragment: Option<String>,
}

#[cfg(feature = "std")]
impl HttpUrl {
pub(crate) fn parse(url: &str, redirected_from: Option<&HttpUrl>) -> Result<HttpUrl, Error> {
enum UrlParseStatus {
@@ -96,9 +102,6 @@ impl HttpUrl {
path_and_query = Some(resource);
resource = String::new();
}
#[cfg(not(feature = "urlencoding"))]
UrlParseStatus::PathAndQuery | UrlParseStatus::Fragment => resource.push(c),
#[cfg(feature = "urlencoding")]
UrlParseStatus::PathAndQuery | UrlParseStatus::Fragment => match c {
// All URL-'safe' characters, plus URL 'special
// characters' like &, #, =, / ,?
@@ -116,25 +119,8 @@
| '?' => {
resource.push(c);
}
// There is probably a simpler way to do this, but this
// method avoids any heap allocations (except extending
// `resource`)
_ => {
// Any UTF-8 character can fit in 4 bytes
let mut utf8_buf = [0u8; 4];
// Bytes fill buffer from the front
c.encode_utf8(&mut utf8_buf);
// Slice disregards the unused portion of the buffer
utf8_buf[..c.len_utf8()].iter().for_each(|byte| {
// Convert byte to URL escape, e.g. %21 for b'!'
let rem = *byte % 16;
let right_char = to_hex_digit(rem);
let left_char = to_hex_digit((*byte - rem) >> 4);
resource.push('%');
resource.push(left_char);
resource.push(right_char);
});
}
// Every other character gets percent-encoded.
_ => percent_encode_char(c, &mut resource),
},
}
}
@@ -191,12 +177,37 @@ impl HttpUrl {
}
}

// https://github.com/kornelski/rust_urlencoding/blob/a4df8027ab34a86a63f1be727965cf101556403f/src/enc.rs#L130-L136
// Converts a UTF-8 byte to a single hexadecimal character
#[cfg(feature = "urlencoding")]
fn to_hex_digit(digit: u8) -> char {
match digit {
0..=9 => (b'0' + digit) as char,
10..=255 => (b'A' - 10 + digit) as char,
/// Returns the `%HH` triplet representing `byte` for percent encoding.
fn percent_encoded_triplet(byte: u8) -> [char; 3] {
const HEX: &[u8; 16] = b"0123456789ABCDEF";
['%', HEX[(byte >> 4) as usize] as char, HEX[(byte & 0x0F) as usize] as char]
}

/// Percent-encodes a char and appends it to `result`.
/// Unreserved characters (0-9, A-Z, a-z, -, ., _, ~) are not encoded.
pub(crate) fn percent_encode_char(c: char, result: &mut String) {
match c {
// All URL-'safe' characters are not encoded
'0'..='9' | 'A'..='Z' | 'a'..='z' | '-' | '.' | '_' | '~' => {
result.push(c);
}
_ => {
// Any UTF-8 character can fit in 4 bytes
let mut utf8_buf = [0u8; 4];
c.encode_utf8(&mut utf8_buf).as_bytes().iter().for_each(|byte| {
for ch in percent_encoded_triplet(*byte) {
result.push(ch);
}
});
}
}
}

/// Percent-encodes the entire input string and returns the encoded version.
pub(crate) fn percent_encode_string(input: &str) -> String {
let mut encoded = String::with_capacity(input.len());
for ch in input.chars() {
percent_encode_char(ch, &mut encoded);
}
encoded
}
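
The helpers above percent-encode each byte of a character's UTF-8 encoding as a `%HH` triplet. As a quick illustration (a hypothetical test, not part of this diff), one might exercise them like this:

#[cfg(test)]
mod percent_encoding_tests {
    use super::percent_encode_string;

    #[test]
    fn encodes_each_utf8_byte_as_a_triplet() {
        // Unreserved characters pass through untouched.
        assert_eq!(percent_encode_string("abc-123_~"), "abc-123_~");
        // A space is the single byte 0x20.
        assert_eq!(percent_encode_string("a b"), "a%20b");
        // 'ä' is U+00E4, two UTF-8 bytes (0xC3 0xA4), hence two triplets.
        assert_eq!(percent_encode_string("ä"), "%C3%A4");
    }
}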
7 changes: 0 additions & 7 deletions bitreq/src/lib.rs
@@ -72,12 +72,6 @@
//!
//! This feature enables HTTP proxy support. See [Proxy].
//!
//! ## `urlencoding`
//!
//! This feature enables percent-encoding for the URL resource when
//! creating a request and any subsequently added parameters from
//! [`Request::with_param`].
//!
//! # Examples
//!
//! ## Get
@@ -231,7 +225,6 @@ extern crate alloc;
#[cfg(feature = "std")]
mod connection;
mod error;
#[cfg(feature = "std")]
mod http_url;
#[cfg(feature = "proxy")]
mod proxy;
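One consequence of dropping the `#[cfg(feature = "std")]` gate on `mod http_url` (my reading of the change, not stated in the diff): request.rs now imports `percent_encode_string` unconditionally, so the module must always compile, while the std-dependent items inside it (`Port`, `HttpUrl`) carry their own gates. A minimal standalone sketch of that pattern, with stand-in bodies:

mod http_url {
    // Only compiled when the `std` feature is enabled.
    #[cfg(feature = "std")]
    pub(crate) struct HttpUrl;

    // Ungated: available to no-std builds as well.
    pub(crate) fn percent_encode_string(input: &str) -> String {
        input.to_owned() // stand-in body for this sketch
    }
}

fn main() {
    println!("{}", http_url::percent_encode_string("ok"));
}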
23 changes: 7 additions & 16 deletions bitreq/src/request.rs
@@ -7,6 +7,7 @@ use core::fmt::Write;
use crate::connection::AsyncConnection;
#[cfg(feature = "std")]
use crate::connection::Connection;
use crate::http_url::percent_encode_string;
#[cfg(feature = "std")]
use crate::http_url::{HttpUrl, Port};
#[cfg(feature = "proxy")]
@@ -98,12 +99,8 @@ impl Request {
/// This is only the request's data, it is not sent yet. For
/// sending the request, see [`send`](struct.Request.html#method.send).
///
/// If `urlencoding` is not enabled, it is the responsibility of the
/// user to ensure there are no illegal characters in the URL.
///
/// If `urlencoding` is enabled, the resource part of the URL will be
/// encoded. Any URL special characters (e.g. &, #, =) are not encoded
/// as they are assumed to be meaningful parameters etc.
/// The resource part of the URL will be encoded. Any URL special characters (e.g. &, #, =) are
/// not encoded as they are assumed to be meaningful parameters etc.
pub fn new<T: Into<URL>>(method: Method, url: T) -> Request {
Request {
method,
@@ -151,18 +148,12 @@ impl Request {
/// Adds given key and value as query parameter to request url
/// (resource).
///
/// If `urlencoding` is not enabled, it is the responsibility
/// of the user to ensure there are no illegal characters in the
/// key or value.
///
/// If `urlencoding` is enabled, the key and value are both encoded.
/// The key and value are both encoded.
pub fn with_param<T: Into<String>, U: Into<String>>(mut self, key: T, value: U) -> Request {
let key = key.into();
#[cfg(feature = "urlencoding")]
let key = urlencoding::encode(&key);
let key = percent_encode_string(&key);
let value = value.into();
#[cfg(feature = "urlencoding")]
let value = urlencoding::encode(&value);
let value = percent_encode_string(&value);

if !self.params.is_empty() {
self.params.push('&');
@@ -610,7 +601,7 @@ mod parsing_tests {
}
}

#[cfg(all(test, feature = "urlencoding"))]
#[cfg(all(test, feature = "std"))]
mod encoding_tests {
use super::{get, ParsedRequest};

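Given the now-unconditional encoding, a hypothetical assertion against `with_param` (not part of this diff; the `params` field and `get` helper as they appear in this file) could look like:

#[test]
fn with_param_encodes_key_and_value() {
    // ' ' becomes %20 and '&' becomes %26, so the value can no longer
    // terminate or split the query string.
    let request = get("http://example.com/").with_param("q", "a b&c");
    assert_eq!(request.params, "q=a%20b%26c");
}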
73 changes: 29 additions & 44 deletions verify/src/reexports.rs
@@ -52,22 +52,19 @@ pub fn check_type_reexports(version: Version) -> Result<()> {
};

for type_name in version_defs.keys() {
let exported = export_map.values().any(|info| {
info.source_version == version_name && type_name == &info.source_ident
});
let exported = export_map
.values()
.any(|info| info.source_version == version_name && type_name == &info.source_ident);
if !exported {
missing.push(format!(
"{} defines {} but does not re-export it",
version_name, type_name
));
missing
.push(format!("{} defines {} but does not re-export it", version_name, type_name));
}
}

// Checks all auxiliary types are re-exported.
for (exported_name, export) in &export_map {
if let Some(deps) = definitions
.get(&export.source_version)
.and_then(|map| map.get(&export.source_ident))
if let Some(deps) =
definitions.get(&export.source_version).and_then(|map| map.get(&export.source_ident))
{
for dep in deps {
if !export_map.contains_key(dep) {
@@ -108,10 +105,7 @@
}

/// Parses all versioned source files and records every public struct/enum name.
fn collect_type_files_and_names(
src_dir: &Path,
versions: &[String],
) -> Result<ParsedTypeFiles> {
fn collect_type_files_and_names(src_dir: &Path, versions: &[String]) -> Result<ParsedTypeFiles> {
let mut files = Vec::new();
let mut names = HashSet::new();

@@ -162,14 +156,18 @@ fn collect_type_definitions(
match item {
Item::Struct(item_struct) if is_public(&item_struct.vis) => {
let deps = collect_deps_from_fields(&item_struct.fields, known_names);
defs.entry(version.clone()).or_default().insert(item_struct.ident.to_string(), deps);
defs.entry(version.clone())
.or_default()
.insert(item_struct.ident.to_string(), deps);
}
Item::Enum(item_enum) if is_public(&item_enum.vis) => {
let mut deps = BTreeSet::new();
for variant in item_enum.variants {
deps.extend(collect_deps_from_fields(&variant.fields, known_names));
}
defs.entry(version.clone()).or_default().insert(item_enum.ident.to_string(), deps);
defs.entry(version.clone())
.or_default()
.insert(item_enum.ident.to_string(), deps);
}
_ => {}
}
@@ -180,15 +178,12 @@
}

/// Reads `mod.rs` for the chosen version and lists its public re-exports.
fn collect_exports(
src_dir: &Path,
version: &str,
) -> Result<HashMap<String, ExportInfo>> {
fn collect_exports(src_dir: &Path, version: &str) -> Result<HashMap<String, ExportInfo>> {
let mod_path = src_dir.join(version).join("mod.rs");
let content = fs::read_to_string(&mod_path)
.with_context(|| format!("reading {}", mod_path.display()))?;
let syntax = syn::parse_file(&content)
.with_context(|| format!("parsing {}", mod_path.display()))?;
let content =
fs::read_to_string(&mod_path).with_context(|| format!("reading {}", mod_path.display()))?;
let syntax =
syn::parse_file(&content).with_context(|| format!("parsing {}", mod_path.display()))?;
let mut exports = HashMap::new();

for item in syntax.items {
@@ -213,16 +208,14 @@ fn collect_exports(
fn collect_deps_from_fields(fields: &Fields, known_names: &HashSet<String>) -> BTreeSet<String> {
let mut deps = BTreeSet::new();
match fields {
Fields::Named(named) => {
Fields::Named(named) =>
for field in &named.named {
collect_type_dependencies(&field.ty, known_names, &mut deps);
}
}
Fields::Unnamed(unnamed) => {
},
Fields::Unnamed(unnamed) =>
for field in &unnamed.unnamed {
collect_type_dependencies(&field.ty, known_names, &mut deps);
}
}
},
Fields::Unit => {}
}
deps
@@ -255,11 +248,10 @@ fn collect_type_dependencies(
Type::Reference(reference) => collect_type_dependencies(&reference.elem, known_names, deps),
Type::Paren(paren) => collect_type_dependencies(&paren.elem, known_names, deps),
Type::Group(group) => collect_type_dependencies(&group.elem, known_names, deps),
Type::Tuple(tuple) => {
Type::Tuple(tuple) =>
for elem in &tuple.elems {
collect_type_dependencies(elem, known_names, deps);
}
}
},
Type::Array(array) => collect_type_dependencies(&array.elem, known_names, deps),
Type::Slice(slice) => collect_type_dependencies(&slice.elem, known_names, deps),
Type::Ptr(ptr) => collect_type_dependencies(&ptr.elem, known_names, deps),
@@ -278,21 +270,17 @@ fn flatten_use_tree(prefix: Vec<String>, tree: &UseTree, acc: &mut Vec<UseEntry>) {
UseTree::Rename(rename) => {
let mut path = prefix;
path.push(rename.ident.to_string());
acc.push(UseEntry {
path,
rename: Some(rename.rename.to_string()),
});
acc.push(UseEntry { path, rename: Some(rename.rename.to_string()) });
}
UseTree::Path(path) => {
let mut new_prefix = prefix;
new_prefix.push(path.ident.to_string());
flatten_use_tree(new_prefix, &path.tree, acc);
}
UseTree::Group(group) => {
UseTree::Group(group) =>
for item in &group.items {
flatten_use_tree(prefix.clone(), item, acc);
}
}
},
UseTree::Glob(_) => {}
}
}
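
For a concrete picture of the traversal above, a hypothetical test (not part of this diff; `UseEntry` fields as defined in this file):

#[test]
fn flattens_groups_and_renames() {
    let item: syn::ItemUse =
        syn::parse_str("pub use self::v1::{Foo, Bar as Baz};").unwrap();
    let mut acc = Vec::new();
    flatten_use_tree(Vec::new(), &item.tree, &mut acc);
    // The group expands into one entry per leaf, renames preserved.
    assert_eq!(acc.len(), 2);
    assert_eq!(acc[0].path, vec!["self", "v1", "Foo"]);
    assert_eq!(acc[0].rename, None);
    assert_eq!(acc[1].path, vec!["self", "v1", "Bar"]);
    assert_eq!(acc[1].rename, Some("Baz".to_string()));
}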
@@ -303,10 +291,7 @@ fn interpret_flat_use(target_version: &str, entry: &UseEntry) -> Option<ExportInfo> {
return None;
}
let source_ident = entry.path.last()?.clone();
let exported_ident = entry
.rename
.clone()
.unwrap_or_else(|| source_ident.clone());
let exported_ident = entry.rename.clone().unwrap_or_else(|| source_ident.clone());

match entry.path.first()?.as_str() {
"self" => Some(ExportInfo {
Expand Down