netdoc: remove the Item::off field as redundant
Now that we are okay with using slice offset pointer math, we can remove the 'off' field from Item entirely.
This commit is contained in:
parent
9443fc7690
commit
b42f91b591
|
@ -287,12 +287,12 @@ impl RouterDesc {
|
|||
|
||||
let (header, body, sig) = RouterDesc::parse_sections(s)?;
|
||||
|
||||
let start_offset = header.get_required(ROUTER)?.off;
|
||||
let start_offset = header.get_required(ROUTER)?.offset_in(s).unwrap();
|
||||
|
||||
// ed25519 identity and signing key.
|
||||
let (identity_cert, ed25519_signing_key) = {
|
||||
let cert_tok = header.get_required(IDENTITY_ED25519)?;
|
||||
if cert_tok.off < start_offset {
|
||||
if cert_tok.offset_in(s).unwrap() < start_offset {
|
||||
return Err(Error::MisplacedToken("identity-ed25519", cert_tok.pos()));
|
||||
}
|
||||
let cert = cert_tok.get_obj("ED25519 CERT")?;
|
||||
|
@ -332,8 +332,10 @@ impl RouterDesc {
|
|||
|
||||
let ed_sig = sig.get_required(ROUTER_SIG_ED25519)?;
|
||||
let rsa_sig = sig.get_required(ROUTER_SIGNATURE)?;
|
||||
let ed_sig_pos = ed_sig.offset_in(s).unwrap();
|
||||
let rsa_sig_pos = rsa_sig.offset_in(s).unwrap();
|
||||
|
||||
if ed_sig.off > rsa_sig.off {
|
||||
if ed_sig_pos > rsa_sig_pos {
|
||||
return Err(Error::UnexpectedToken(
|
||||
ROUTER_SIG_ED25519.to_str(),
|
||||
ed_sig.pos(),
|
||||
|
@ -346,7 +348,7 @@ impl RouterDesc {
|
|||
// XXXX spec is ambiguous whether this prefix goes on
|
||||
// before or after taking the hash.
|
||||
d.input(&b"Tor router descriptor signature v1"[..]);
|
||||
let signed_end = ed_sig.off + b"router-sig-ed25519 ".len();
|
||||
let signed_end = ed_sig_pos + b"router-sig-ed25519 ".len();
|
||||
d.input(&s[start_offset..signed_end]);
|
||||
let d = d.result();
|
||||
let sig: B64 = ed_sig.parse_arg(0)?;
|
||||
|
@ -363,7 +365,7 @@ impl RouterDesc {
|
|||
// Check legacy RSA signature.
|
||||
{
|
||||
let mut d = ll::d::Sha1::new();
|
||||
let signed_end = rsa_sig.off + b"router-signature\n".len();
|
||||
let signed_end = rsa_sig_pos + b"router-signature\n".len();
|
||||
d.input(&s[start_offset..signed_end]);
|
||||
let d = d.result();
|
||||
let sig = rsa_sig.get_obj("SIGNATURE")?;
|
||||
|
|
|
@ -30,7 +30,6 @@ pub struct Object<'a> {
|
|||
/// containing string.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Item<'a> {
|
||||
pub off: usize, // don't make this pub.XXXX
|
||||
kwd: &'a str,
|
||||
args: &'a str,
|
||||
/// The arguments, split by whitespace. This vector is contructed
|
||||
|
@ -168,12 +167,10 @@ impl<'a> NetDocReader<'a> {
|
|||
if self.remaining() == 0 {
|
||||
return Ok(None);
|
||||
}
|
||||
let off = self.off;
|
||||
let (kwd, args) = self.get_kwdline()?;
|
||||
let object = self.get_object()?;
|
||||
let split_args = RefCell::new(None);
|
||||
Ok(Some(Item {
|
||||
off,
|
||||
kwd,
|
||||
args,
|
||||
split_args,
|
||||
|
@ -324,10 +321,18 @@ impl<'a> Item<'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
/// Return the position of this item without reference to its containing
|
||||
/// string.
|
||||
/// Return the position of this item.
|
||||
///
|
||||
/// This position won't be useful unless it is later contextualized
|
||||
/// with the containing string.
|
||||
pub fn pos(&self) -> Pos {
|
||||
Pos::from_byte(self.off)
|
||||
Pos::at(self.kwd)
|
||||
}
|
||||
/// Return the position of this Item in a string.
|
||||
///
|
||||
/// Returns None if this item doesn't actually belong to the string.
|
||||
pub fn offset_in(&self, s: &str) -> Option<usize> {
|
||||
crate::util::str_offset(s, self.kwd)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -77,6 +77,44 @@ impl<I: Iterator, F: FnMut(&I::Item) -> bool> Iterator for PauseAt<I, F> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Return the position of one string slice within another.
///
/// If `needle` is indeed part of `haystack`, returns some offset
/// `off`, such that `needle` is the same as
/// `&haystack[off..off + needle.len()]`.
///
/// Returns None if `needle` is not a part of `haystack`.
///
/// Remember, offsets are in bytes, not in characters.
///
/// # Example
/// ```ignore
/// use tor_netdoc::util::str_offset;
/// let quote = "A rose is a rose is a rose."; // -- Gertrude Stein
/// assert_eq!(&quote[2..6], "rose");
/// assert_eq!(str_offset(quote, &quote[2..6]).unwrap(), 2);
/// assert_eq!(&quote[12..16], "rose");
/// assert_eq!(str_offset(quote, &quote[12..16]).unwrap(), 12);
/// assert_eq!(&quote[22..26], "rose");
/// assert_eq!(str_offset(quote, &quote[22..26]).unwrap(), 22);
///
/// assert_eq!(str_offset(quote, "rose"), None);
///
/// assert_eq!(str_offset(&quote[1..], &quote[2..6]), Some(1));
/// assert_eq!(str_offset(&quote[1..5], &quote[2..6]), None);
/// ```
pub fn str_offset(haystack: &str, needle: &str) -> Option<usize> {
    // Compare raw byte addresses: `needle` is a sub-slice of `haystack`
    // iff its byte range lies entirely within `haystack`'s byte range.
    // (Offsets here are plain integer pointer math, not character counts.)
    let needle_start_u = needle.as_ptr() as usize;
    let needle_end_u = needle_start_u + needle.len();
    let haystack_start_u = haystack.as_ptr() as usize;
    let haystack_end_u = haystack_start_u + haystack.len();
    if haystack_start_u <= needle_start_u && needle_end_u <= haystack_end_u {
        Some(needle_start_u - haystack_start_u)
    } else {
        None
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
|
@ -102,4 +140,21 @@ mod tests {
|
|||
assert_eq!(iter.next(), Some(9));
|
||||
assert_eq!(iter.next(), None);
|
||||
}
|
||||
|
||||
#[test]
fn test_str_offset() {
    use super::str_offset;
    let quote = "A rose is a rose is a rose."; // -- Gertrude Stein
    // Each "rose" is a genuine sub-slice of `quote`, so its byte
    // offset within `quote` is recoverable.
    assert_eq!(&quote[2..6], "rose");
    assert_eq!(str_offset(quote, &quote[2..6]).unwrap(), 2);
    assert_eq!(&quote[12..16], "rose");
    assert_eq!(str_offset(quote, &quote[12..16]).unwrap(), 12);
    assert_eq!(&quote[22..26], "rose");
    assert_eq!(str_offset(quote, &quote[22..26]).unwrap(), 22);

    // A fresh "rose" literal is equal to, but not a slice of, `quote`.
    assert_eq!(str_offset(quote, "rose"), None);

    // Offsets are relative to the haystack slice given, and the needle
    // must lie entirely inside that haystack.
    assert_eq!(str_offset(&quote[1..], &quote[2..6]), Some(1));
    assert_eq!(str_offset(&quote[1..5], &quote[2..6]), None);
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue