Merge branch 'rpc-objectmap' into 'main'

RPC: revise semantics for weak references and object IDs

Closes #848

See merge request tpo/core/arti!1183
Author: Nick Mathewson
Date:   2023-05-16 13:30:42 +00:00
Commit: ef3c049064
5 changed files with 211 additions and 228 deletions

Cargo.lock (generated)

@@ -232,15 +232,19 @@ version = "0.1.0"
 dependencies = [
  "arti-client",
  "asynchronous-codec",
+ "base64ct",
  "bytes",
  "erased-serde",
  "futures",
  "futures-await-test",
  "pin-project",
+ "rand 0.8.5",
  "serde",
  "serde_json",
  "thiserror",
  "tor-async-utils",
+ "tor-basic-utils",
+ "tor-bytes",
  "tor-error",
  "tor-rpcbase",
  "tor-rtcompat",

crates/arti-rpcserver/Cargo.toml

@@ -18,6 +18,7 @@ full = ["arti-client/full", "tor-async-utils/full", "tor-error/full", "tor-rpcba
 [dependencies]
 arti-client = { path = "../arti-client", version = "0.9.0", features = ["rpc"] }
 asynchronous-codec = { version = "0.6.0", features = ["json"] }
+base64ct = "1.5.1"
 bytes = "1"
 erased-serde = "0.3.25"
 futures = "0.3.14"
@@ -25,10 +26,12 @@ futures = "0.3.14"
 # out.
 # generational-arena = "0.2.8"
 pin-project = "1"
+rand = "0.8"
 serde = { version = "1.0.103", features = ["derive"] }
 serde_json = "1.0.50"
 thiserror = "1"
 tor-async-utils = { path = "../tor-async-utils", version = "0.1.0" }
+tor-bytes = { path = "../tor-bytes", version = "0.7.0" }
 tor-error = { path = "../tor-error", version = "0.5.0" }
 tor-rpcbase = { path = "../tor-rpcbase", version = "0.1.0" }
 tor-rtcompat = { path = "../tor-rtcompat", version = "0.9.0" }
@@ -37,3 +40,4 @@ typetag = "0.2.7"
 [dev-dependencies]
 futures-await-test = "0.3.0"
+tor-basic-utils = { path = "../tor-basic-utils", version = "0.7.0" }

crates/arti-rpcserver/src/connection.rs

@@ -103,7 +103,7 @@ impl Connection {
         } else {
             inner
                 .objects
-                .lookup(id.try_into()?)
+                .lookup(crate::objmap::GenIdx::try_decode(id)?)
                 .ok_or(rpc::LookupError::NoObject(id.clone()))
         }
     }
@@ -364,7 +364,7 @@ impl rpc::Context for RequestContext {
             .expect("Lock poisoned")
             .objects
             .insert_strong(object)
-            .into()
+            .encode()
     }

     fn register_weak(&self, object: Arc<dyn rpc::Object>) -> rpc::ObjectId {
@@ -374,7 +374,7 @@ impl rpc::Context for RequestContext {
             .expect("Lock poisoned")
             .objects
             .insert_weak(object)
-            .into()
+            .encode()
     }
 }
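The connection.rs change above swaps the old `From`/`TryFrom` conversions for the explicit `encode`/`try_decode` pair. Below is a minimal sketch of the resulting round trip, assuming the crate-internal `ObjMap` and `GenIdx` APIs shown in the objmap.rs diff further down; the helper function name and the `objects` parameter are invented for this illustration.

```
// Hypothetical helper: register an object, hand out its ObjectId, and later
// resolve an incoming ObjectId back to the object, as connection.rs now does.
fn register_and_lookup(
    objects: &mut ObjMap,
    obj: Arc<dyn rpc::Object>,
) -> Result<Arc<dyn rpc::Object>, rpc::LookupError> {
    // Registering returns a GenIdx, which is encoded into an opaque,
    // randomized ObjectId string for the client.
    let id: rpc::ObjectId = objects.insert_strong(obj).encode();

    // When a later request names that ObjectId, it is decoded back into a
    // GenIdx and looked up; an unknown or malformed id becomes NoObject.
    let idx = crate::objmap::GenIdx::try_decode(&id)?;
    objects
        .lookup(idx)
        .ok_or(rpc::LookupError::NoObject(id.clone()))
}
```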

crates/arti-rpcserver/src/objmap.rs

@@ -93,30 +93,34 @@ mod fake_generational_arena {
 /// A mechanism to look up RPC `Objects` by their `ObjectId`.
 #[derive(Default)]
 pub(crate) struct ObjMap {
-    /// Generationally indexed arena of object references.
+    /// Generationally indexed arena of strong object references.
+    strong_arena: Arena<Arc<dyn rpc::Object>>,
+    /// Generationally indexed arena of weak object references.
     ///
     /// Invariants:
     /// * No object has more than one reference in this arena.
     /// * Every `entry` in this arena at position `idx` has a corresponding
     ///   entry in `reverse_map` entry such that
     ///   `reverse_map[entry.tagged_addr()] == idx`.
-    arena: Arena<ArenaEntry>,
-    /// Backwards reference to look up arena references by the underlying object identity.
+    weak_arena: Arena<WeakArenaEntry>,
+    /// Backwards reference to look up weak arena references by the underlying
+    /// object identity.
     ///
     /// Invariants:
-    /// * For every `(addr,idx)` entry in this map, there is a corresponding
-    ///   ArenaEntry in `arena` such that `arena[idx].tagged_addr() == addr`
-    reverse_map: HashMap<TaggedAddr, GenIdx>,
+    /// * For every weak `(addr,idx)` entry in this map, there is a
+    ///   corresponding ArenaEntry in `arena` such that
+    ///   `arena[idx].tagged_addr() == addr`
+    reverse_map: HashMap<TaggedAddr, generational_arena::Index>,
     /// Testing only: How many times have we tidied this map?
     #[cfg(test)]
     n_tidies: usize,
 }

-/// A single entry to an Object stored in the generational arena.
+/// A single entry to a weak Object stored in the generational arena.
 ///
-struct ArenaEntry {
+struct WeakArenaEntry {
     /// The actual Arc or Weak reference for the object that we're storing here.
-    obj: ObjRef,
+    obj: Weak<dyn rpc::Object>,
     ///
     /// This contains a strong or weak reference, along with the object's true TypeId.
     /// See the [`TaggedAddr`] for more info on
@@ -124,35 +128,6 @@ struct ArenaEntry {
     id: any::TypeId,
 }

-/// Strong or weak reference to an Object.
-enum ObjRef {
-    /// A strong reference
-    Strong(Arc<dyn rpc::Object>),
-    /// A weak reference
-    Weak(Weak<dyn rpc::Object>),
-}
-
-impl ObjRef {
-    /// Try to return a strong reference to this object, upgrading a weak
-    /// reference if needed.
-    ///
-    /// A `None` return indicates a dangling weak reference.
-    fn strong(&self) -> Option<Arc<dyn rpc::Object>> {
-        match self {
-            ObjRef::Strong(s) => Some(s.clone()),
-            ObjRef::Weak(w) => Weak::upgrade(w),
-        }
-    }
-
-    /// Return the [`RawAddr`] associated with this object.
-    fn raw_addr(&self) -> RawAddr {
-        match self {
-            ObjRef::Strong(s) => raw_addr_of(s),
-            ObjRef::Weak(w) => raw_addr_of_weak(w),
-        }
-    }
-}
-
 /// The raw address of an object held in an Arc or Weak.
 ///
 /// This will be the same for every clone of an Arc, and the same for every Weak
@@ -198,16 +173,16 @@ struct TaggedAddr {
     addr: RawAddr,
     /// The type of the object.
     type_id: any::TypeId,
-    /// True if this is a strong reference.
-    ///
-    /// TODO: We could use one of the unused lower-order bits in raw-addr to
-    /// avoid bloating this type.
-    is_strong: bool,
 }

 /// A generational index for [`ObjMap`].
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub(crate) struct GenIdx(generational_arena::Index);
+pub(crate) enum GenIdx {
+    /// An index into the arena of weak references.
+    Weak(generational_arena::Index),
+    /// An index into the arena of strong references
+    Strong(generational_arena::Index),
+}

 /// Return the [`RawAddr`] of an arbitrary `Arc<T>`.
 fn raw_addr_of<T: ?Sized>(arc: &Arc<T>) -> RawAddr {
@@ -221,61 +196,38 @@ fn raw_addr_of_weak<T: ?Sized>(arc: &Weak<T>) -> RawAddr {
     RawAddr(Weak::as_ptr(arc) as *const () as usize)
 }

-impl ArenaEntry {
-    /// Create a new `ArenaEntry` for a strong reference.
-    fn new_strong(object: Arc<dyn rpc::Object>) -> Self {
-        let id = (*object).type_id();
-        Self {
-            obj: ObjRef::Strong(object),
-            id,
-        }
-    }
-
-    /// Create a new `ArenaEntry` for a weak reference.
-    fn new_weak(object: &Arc<dyn rpc::Object>) -> Self {
+impl WeakArenaEntry {
+    /// Create a new `WeakArenaEntry` for a weak reference.
+    fn new(object: &Arc<dyn rpc::Object>) -> Self {
         let id = (**object).type_id();
         Self {
-            obj: ObjRef::Weak(Arc::downgrade(object)),
+            obj: Arc::downgrade(object),
             id,
         }
     }

     /// Return true if this `ArenaEntry` is really present.
     ///
-    /// Note that this function can produce false positives (if the entry is Weak
-    /// and its last strong reference is dropped in another thread), but it can
+    /// Note that this function can produce false positives (if the entry's
+    /// last strong reference is dropped in another thread), but it can
     /// never produce false negatives.
     fn is_present(&self) -> bool {
-        match &self.obj {
-            ObjRef::Strong(_) => true,
-            ObjRef::Weak(w) => {
-                // This is safe from false negatives because: if we can ever
-                // observe strong_count == 0, then there is no way for anybody
-                // else to "resurrect" the object.
-                w.strong_count() > 0
-            }
-        }
+        // This is safe from false negatives because: if we can ever
+        // observe strong_count == 0, then there is no way for anybody
+        // else to "resurrect" the object.
+        self.obj.strong_count() > 0
     }

     /// Return a strong reference to the object in this entry, if possible.
     fn strong(&self) -> Option<Arc<dyn rpc::Object>> {
-        match &self.obj {
-            ObjRef::Strong(s) => Some(Arc::clone(s)),
-            ObjRef::Weak(w) => Weak::upgrade(w),
-        }
-    }
-
-    /// Return true if this is a weak reference.
-    fn is_weak(&self) -> bool {
-        matches!(&self.obj, ObjRef::Weak(_))
+        Weak::upgrade(&self.obj)
     }

     /// Return the [`TaggedAddr`] that can be used to identify this entry's object.
     fn tagged_addr(&self) -> TaggedAddr {
         TaggedAddr {
-            addr: self.obj.raw_addr(),
+            addr: raw_addr_of_weak(&self.obj),
             type_id: self.id,
-            is_strong: matches!(self.obj, ObjRef::Strong(_)),
         }
     }
 }
@@ -283,48 +235,79 @@ impl ArenaEntry {
 impl TaggedAddr {
     /// Return the `TaggedAddr` to uniquely identify `obj` over the course of
     /// its existence.
-    fn for_object(obj: &Arc<dyn rpc::Object>, is_strong: bool) -> Self {
+    fn for_object(obj: &Arc<dyn rpc::Object>) -> Self {
         let type_id = (*obj).type_id();
         let addr = raw_addr_of(obj);
-        TaggedAddr {
-            addr,
-            type_id,
-            is_strong,
-        }
+        TaggedAddr { addr, type_id }
     }
 }

-/// The character we use to start all generational indices when encoded as a string.
-const IDX_INDICATOR_CHAR: char = '%';
-
-/// The character we use to separate the two parts of a generational index when
-/// encoding as a string.
-const IDX_SEPARATOR_CHAR: char = ':';
-
-impl From<GenIdx> for rpc::ObjectId {
-    fn from(idx: GenIdx) -> Self {
-        let (a, b) = idx.0.into_raw_parts();
-        rpc::ObjectId::from(format!("{IDX_SEPARATOR_CHAR}{a}{IDX_SEPARATOR_CHAR}{b}"))
+/// Encoding functions for GenIdx.
+///
+/// The encoding is deliberately nondeterministic: we want to avoid situations
+/// where applications depend on the details of our ObjectIds, or hardcode the
+/// ObjectIds they expect, or rely on the same weak generational index getting
+/// encoded the same way every time they see it.
+///
+/// The encoding is deliberately non-cryptographic: we do not want to imply
+/// that this gives any security. It is just a mild deterrent to misuse.
+///
+/// If you find yourself wanting to reverse-engineer this code so that you can
+/// analyze these object IDs, please contact the Arti developers instead and let
+/// us give you a better way to do whatever you want.
+impl GenIdx {
+    /// Encode `self` into an rpc::ObjectId that we can give to a client.
+    pub(crate) fn encode(self) -> rpc::ObjectId {
+        self.encode_with_rng(&mut rand::thread_rng())
     }
-}

-impl TryFrom<&rpc::ObjectId> for GenIdx {
-    type Error = rpc::LookupError;
+    /// As `encode`, but take a Rng as an argument. For testing.
+    fn encode_with_rng<R: rand::RngCore>(self, rng: &mut R) -> rpc::ObjectId {
+        use base64ct::Encoding;
+        use rand::Rng;
+        use tor_bytes::Writer;
+        let (weak_bit, idx) = match self {
+            GenIdx::Weak(idx) => (1, idx),
+            GenIdx::Strong(idx) => (0, idx),
+        };
+        let (a, b) = idx.into_raw_parts();
+        let x = rng.gen::<u64>() << 1;
+        let mut bytes = Vec::new();
+        bytes.write_u64(x | weak_bit);
+        bytes.write_u64((a as u64).wrapping_add(x));
+        bytes.write_u64(b.wrapping_sub(x));
+        rpc::ObjectId::from(base64ct::Base64UrlUnpadded::encode_string(&bytes[..]))
+    }

-    fn try_from(id: &rpc::ObjectId) -> Result<Self, Self::Error> {
-        let s = id.as_ref();
-        if let Some(s) = s.strip_prefix(IDX_INDICATOR_CHAR) {
-            if let Some((a_str, b_str)) = s.split_once(IDX_SEPARATOR_CHAR) {
-                let a = a_str
-                    .parse()
-                    .map_err(|_| rpc::LookupError::NoObject(id.clone()))?;
-                let b = b_str
-                    .parse()
-                    .map_err(|_| rpc::LookupError::NoObject(id.clone()))?;
-                return Ok(GenIdx(generational_arena::Index::from_raw_parts(a, b)));
-            }
+    /// Attempt to decode `id` into a `GenIdx` than an ObjMap can use.
+    pub(crate) fn try_decode(id: &rpc::ObjectId) -> Result<Self, rpc::LookupError> {
+        use base64ct::Encoding;
+        use tor_bytes::Reader;
+
+        let bytes = base64ct::Base64UrlUnpadded::decode_vec(id.as_ref())
+            .map_err(|_| rpc::LookupError::NoObject(id.clone()))?;
+        let mut r = Reader::from_slice(&bytes);
+        let mut get_u64 = || {
+            r.take_u64()
+                .map_err(|_| rpc::LookupError::NoObject(id.clone()))
+        };
+        let x = get_u64()?;
+        let is_weak = (x & 1) == 1;
+        let x = x & !1;
+        let a = get_u64()?;
+        let b = get_u64()?;
+        r.should_be_exhausted()
+            .map_err(|_| rpc::LookupError::NoObject(id.clone()))?;
+
+        let a = a.wrapping_sub(x) as usize;
+        let b = b.wrapping_add(x);
+
+        let idx = generational_arena::Index::from_raw_parts(a, b);
+        if is_weak {
+            Ok(GenIdx::Weak(idx))
+        } else {
+            Ok(GenIdx::Strong(idx))
         }
-
-        Err(rpc::LookupError::NoObject(id.clone()))
     }
 }
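The doc comment above explains why the encoding is randomized; the arithmetic itself is simple. Here is a small, self-contained illustration of the same masking scheme using only the standard library (the real `encode_with_rng`/`try_decode` additionally serialize the three words with `tor-bytes` and base64url-encode them with `base64ct`; the function and variable names below are invented for this sketch):

```
// Standalone illustration of the ObjectId obfuscation arithmetic used by
// GenIdx::encode_with_rng / GenIdx::try_decode above. The base64url step and
// the tor-bytes reader/writer are omitted; only the masking round trip is shown.
fn encode(is_weak: bool, a: u64, b: u64, x_random: u64) -> [u64; 3] {
    let weak_bit = if is_weak { 1 } else { 0 };
    let x = x_random << 1; // low bit is reserved for the weak/strong flag
    [
        x | weak_bit,       // word 0: random mask plus flag
        a.wrapping_add(x),  // word 1: arena slot, masked
        b.wrapping_sub(x),  // word 2: generation, masked
    ]
}

fn decode(words: [u64; 3]) -> (bool, u64, u64) {
    let is_weak = (words[0] & 1) == 1;
    let x = words[0] & !1;
    (is_weak, words[1].wrapping_sub(x), words[2].wrapping_add(x))
}

fn main() {
    // Example: a weak index with slot 7 and generation 3, two different masks.
    let words = encode(true, 7, 3, 0x1234_5678_9abc_def0);
    assert_eq!(decode(words), (true, 7, 3));

    // A different mask yields a different encoding of the same index,
    // but decoding still recovers the original values.
    let words2 = encode(true, 7, 3, 0x0f0f_0f0f_0f0f_0f0f);
    assert_ne!(words, words2);
    assert_eq!(decode(words2), (true, 7, 3));
    println!("round-trip ok");
}
```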
@@ -334,7 +317,7 @@ impl ObjMap {
         Self::default()
     }

-    /// Reclaim unused space in this map.
+    /// Reclaim unused space in this map's weak arena.
     ///
     /// This runs in `O(n)` time.
     fn tidy(&mut self) {
@@ -342,26 +325,26 @@
         {
             self.n_tidies += 1;
         }
-        self.arena.retain(|index, entry| {
+        self.weak_arena.retain(|index, entry| {
             let present = entry.is_present();
             if !present {
                 // For everything we are removing from the `arena`, we must also
                 // remove it from `reverse_map`.
                 let ptr = entry.tagged_addr();
                 let found = self.reverse_map.remove(&ptr);
-                debug_assert_eq!(found, Some(GenIdx(index)));
+                debug_assert_eq!(found, Some(index));
             }
             present
         });
     }

-    /// If needed, clean this arena and resize it.
+    /// If needed, clean the weak arena and resize it.
     ///
     /// (We call this whenever we're about to add an entry. This ensures that
     /// our insertion operations run in `O(1)` time.)
     fn adjust_size(&mut self) {
         // If we're about to fill the arena...
-        if self.arena.len() >= self.arena.capacity() {
+        if self.weak_arena.len() >= self.weak_arena.capacity() {
             // ... we delete any dead `Weak` entries.
             self.tidy();
             // Then, if the arena is still above half-full, we double the
@@ -371,77 +354,74 @@
             // entries, or else we might re-run tidy() too soon. But we don't
             // want to grow the arena if tidy() removed _most_ entries, or some
             // normal usage patterns will lead to unbounded growth.)
-            if self.arena.len() > self.arena.capacity() / 2 {
-                self.arena.reserve(self.arena.capacity());
+            if self.weak_arena.len() > self.weak_arena.capacity() / 2 {
+                self.weak_arena.reserve(self.weak_arena.capacity());
             }
         }
     }

-    /// Ensure that there is a strong entry for `value` in self (by inserting it
-    /// as needed), and return its index.
+    /// Unconditionally insert a strong entry for `value` in self, and return its index.
     pub(crate) fn insert_strong(&mut self, value: Arc<dyn rpc::Object>) -> GenIdx {
-        let ptr = TaggedAddr::for_object(&value, true);
-        if let Some(idx) = self.reverse_map.get(&ptr) {
-            if let Some(entry) = self.arena.get_mut(idx.0) {
-                debug_assert!(entry.tagged_addr() == ptr);
-                return *idx;
-            }
-        }
-        self.adjust_size();
-        let idx = GenIdx(self.arena.insert(ArenaEntry::new_strong(value)));
-        self.reverse_map.insert(ptr, idx);
-        idx
+        GenIdx::Strong(self.strong_arena.insert(value))
     }

-    /// Ensure that there is an entry for `value` in self, and return its index.
+    /// Ensure that there is a weak entry for `value` in self, and return an
+    /// index for it.
     ///
     /// If there is no entry, create a weak entry.
     #[allow(clippy::needless_pass_by_value)] // TODO: Decide whether to make this take a reference.
     pub(crate) fn insert_weak(&mut self, value: Arc<dyn rpc::Object>) -> GenIdx {
-        let ptr = TaggedAddr::for_object(&value, false);
+        let ptr = TaggedAddr::for_object(&value);
         if let Some(idx) = self.reverse_map.get(&ptr) {
             #[cfg(debug_assertions)]
-            match self.arena.get(idx.0) {
+            match self.weak_arena.get(*idx) {
                 Some(entry) => debug_assert!(entry.tagged_addr() == ptr),
                 None => panic!("Found a dangling reference"),
             }
-            return *idx;
+            return GenIdx::Weak(*idx);
         }

         self.adjust_size();
-        let idx = GenIdx(self.arena.insert(ArenaEntry::new_weak(&value)));
+        let idx = self.weak_arena.insert(WeakArenaEntry::new(&value));
         self.reverse_map.insert(ptr, idx);
-        idx
+        GenIdx::Weak(idx)
     }

     /// Return the entry from this ObjMap for `idx`.
     pub(crate) fn lookup(&self, idx: GenIdx) -> Option<Arc<dyn rpc::Object>> {
-        self.arena.get(idx.0).and_then(ArenaEntry::strong)
+        match idx {
+            GenIdx::Weak(idx) => self.weak_arena.get(idx).and_then(WeakArenaEntry::strong),
+            GenIdx::Strong(idx) => self.strong_arena.get(idx).map(Arc::clone),
+        }
     }

     /// Remove the entry at `idx`, if any.
     pub(crate) fn remove(&mut self, idx: GenIdx) {
-        if let Some(entry) = self.arena.remove(idx.0) {
-            let old_idx = self.reverse_map.remove(&entry.tagged_addr());
-            debug_assert_eq!(old_idx, Some(idx));
+        match idx {
+            GenIdx::Weak(idx) => {
+                if let Some(entry) = self.weak_arena.remove(idx) {
+                    let old_idx = self.reverse_map.remove(&entry.tagged_addr());
+                    debug_assert_eq!(old_idx, Some(idx));
+                }
+            }
+            GenIdx::Strong(idx) => {
+                self.strong_arena.remove(idx);
+            }
         }
     }

     /// Testing only: Assert that every invariant for this structure is met.
     #[cfg(test)]
     fn assert_okay(&self) {
-        for (index, entry) in self.arena.iter() {
+        for (index, entry) in self.weak_arena.iter() {
             let ptr = entry.tagged_addr();
-            assert_eq!(self.reverse_map.get(&ptr), Some(&GenIdx(index)));
+            assert_eq!(self.reverse_map.get(&ptr), Some(&index));
             assert_eq!(ptr, entry.tagged_addr());
         }

         for (ptr, idx) in self.reverse_map.iter() {
             let entry = self
-                .arena
-                .get(idx.0)
+                .weak_arena
+                .get(*idx)
                 .expect("Dangling pointer in reverse map");
             assert_eq!(&entry.tagged_addr(), ptr);
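The insert_strong/insert_weak changes above are the semantic heart of this commit: strong ("handle") registrations are no longer deduplicated, while weak ("reference") registrations still are. A hypothetical test in the style of the existing ones below, assuming the crate-internal ObjMap API and the ExampleObject test type used elsewhere in this file:

```
// Sketch of a test capturing the revised semantics (not part of this commit).
#[test]
fn strong_vs_weak_semantics() {
    let mut map = ObjMap::new();
    let obj: Arc<dyn rpc::Object> = Arc::new(ExampleObject("dandelion".into()));

    // insert_strong no longer deduplicates: every call creates a fresh entry,
    // and each returned GenIdx has to be removed separately.
    let s1 = map.insert_strong(obj.clone());
    let s2 = map.insert_strong(obj.clone());
    assert_ne!(s1, s2);

    // insert_weak still deduplicates by the object's tagged address, so the
    // same object yields the same weak index.
    let w1 = map.insert_weak(obj.clone());
    let w2 = map.insert_weak(obj.clone());
    assert_eq!(w1, w2);

    // Dropping our own Arc does not invalidate the strong entries, and the
    // weak entry stays alive as long as some strong entry keeps the object.
    drop(obj);
    assert!(map.lookup(s1).is_some());
    assert!(map.lookup(w1).is_some());

    // Once the last strong entry is gone, the weak entry can no longer be
    // upgraded, so lookup returns None.
    map.remove(s1);
    map.remove(s2);
    assert!(map.lookup(w1).is_none());
}
```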
@@ -528,52 +508,52 @@ mod test {
         let wrapped_weak = Arc::downgrade(&wrapped_dyn);

         assert_eq!(
-            TaggedAddr::for_object(&object_dyn, true),
-            TaggedAddr::for_object(&object_dyn2, true)
+            TaggedAddr::for_object(&object_dyn),
+            TaggedAddr::for_object(&object_dyn2)
         );
         assert_ne!(
-            TaggedAddr::for_object(&object_dyn, true),
-            TaggedAddr::for_object(&object2, true)
+            TaggedAddr::for_object(&object_dyn),
+            TaggedAddr::for_object(&object2)
         );

         assert_eq!(
-            TaggedAddr::for_object(&wrapped_dyn, true),
-            TaggedAddr::for_object(&wrapped_dyn2, true)
+            TaggedAddr::for_object(&wrapped_dyn),
+            TaggedAddr::for_object(&wrapped_dyn2)
         );
         assert_ne!(
-            TaggedAddr::for_object(&object_dyn, true),
-            TaggedAddr::for_object(&wrapped_dyn, true)
+            TaggedAddr::for_object(&object_dyn),
+            TaggedAddr::for_object(&wrapped_dyn)
         );

         assert_eq!(
-            TaggedAddr::for_object(&object_dyn, true).addr,
-            TaggedAddr::for_object(&wrapped_dyn, true).addr
+            TaggedAddr::for_object(&object_dyn).addr,
+            TaggedAddr::for_object(&wrapped_dyn).addr
         );
         assert_eq!(
-            TaggedAddr::for_object(&wrapped_dyn, true).addr,
+            TaggedAddr::for_object(&wrapped_dyn).addr,
             raw_addr_of_weak(&wrapped_weak)
         );

         assert_eq!(
-            TaggedAddr::for_object(&object_dyn, true).type_id,
+            TaggedAddr::for_object(&object_dyn).type_id,
             any::TypeId::of::<ExampleObject>()
         );
         assert_eq!(
-            TaggedAddr::for_object(&wrapped_dyn, true).type_id,
+            TaggedAddr::for_object(&wrapped_dyn).type_id,
             any::TypeId::of::<Wrapper>()
         );

         assert_eq!(
-            TaggedAddr::for_object(&object_dyn, true).addr,
+            TaggedAddr::for_object(&object_dyn).addr,
             raw_addr_of(&object)
         );
         assert_eq!(
-            TaggedAddr::for_object(&wrapped_dyn, true).addr,
+            TaggedAddr::for_object(&wrapped_dyn).addr,
             raw_addr_of(&wrapped)
         );
         assert_ne!(
-            TaggedAddr::for_object(&object_dyn, true).addr,
+            TaggedAddr::for_object(&object_dyn).addr,
             raw_addr_of(&object2)
         );
     }
@@ -586,9 +566,11 @@ mod test {
         map.assert_okay();

         let id1 = map.insert_strong(obj1.clone());
         let id2 = map.insert_strong(obj1.clone());
-        assert_eq!(id1, id2);
-        let obj_out = map.lookup(id1).unwrap();
-        assert_eq!(raw_addr_of(&obj1), raw_addr_of(&obj_out));
+        assert_ne!(id1, id2);
+        let obj_out1 = map.lookup(id1).unwrap();
+        let obj_out2 = map.lookup(id2).unwrap();
+        assert_eq!(raw_addr_of(&obj1), raw_addr_of(&obj_out1));
+        assert_eq!(raw_addr_of(&obj1), raw_addr_of(&obj_out2));
         map.assert_okay();
     }
@@ -664,7 +646,7 @@ mod test {
         }

         {
-            assert_eq!(id1, map.insert_strong(obj1.clone()));
+            assert_ne!(id1, map.insert_strong(obj1.clone()));
             assert_ne!(id2, map.insert_strong(obj2.clone()));
         }
     }
@@ -697,6 +679,7 @@ mod test {
     #[test]
     fn tidy() {
         let mut map = ObjMap::new();
+        let mut keep_these = vec![];
         let mut s = vec![];
         let mut w = vec![];
         for _ in 0..100 {
@@ -706,7 +689,9 @@ mod test {
                 w.push(map.insert_weak(o.clone()));
                 t.push(o);
             }
-            s.push(map.insert_strong(Arc::new(ExampleObject("cafe".into()))));
+            let obj = Arc::new(ExampleObject("cafe".into()));
+            keep_these.push(obj.clone());
+            s.push(map.insert_weak(obj));
             drop(t);
             map.assert_okay();
         }
@@ -716,11 +701,11 @@ mod test {
         assert!(w.iter().all(|id| map.lookup(*id).is_none()));
         assert!(s.iter().all(|id| map.lookup(*id).is_some()));

-        assert_ne!(dbg!(map.arena.len()), 1100);
+        assert_ne!(map.weak_arena.len() + map.strong_arena.len(), 1100);
         map.assert_okay();
         map.tidy();
         map.assert_okay();
-        assert_eq!(map.arena.len(), 100);
+        assert_eq!(map.weak_arena.len() + map.strong_arena.len(), 100);

         // This number is a bit arbitrary.
         assert!(dbg!(map.n_tidies) < 30);
@@ -735,6 +720,34 @@ mod test {
         let mut map = ObjMap::new();
         map.insert_strong(obj);
         map.insert_strong(wrap);
-        assert_eq!(map.arena.len(), 2);
+        assert_eq!(map.strong_arena.len(), 2);
     }
+
+    #[test]
+    fn objid_encoding() {
+        use rand::Rng;
+        fn test_roundtrip(a: usize, b: u64, rng: &mut tor_basic_utils::test_rng::TestingRng) {
+            let idx = generational_arena::Index::from_raw_parts(a, b);
+            let idx = if rng.gen_bool(0.5) {
+                GenIdx::Strong(idx)
+            } else {
+                GenIdx::Weak(idx)
+            };
+            let s1 = idx.encode_with_rng(rng);
+            let s2 = idx.encode_with_rng(rng);
+            assert_ne!(s1, s2);
+            assert_eq!(idx, GenIdx::try_decode(&s1).unwrap());
+            assert_eq!(idx, GenIdx::try_decode(&s2).unwrap());
+        }
+
+        let mut rng = tor_basic_utils::test_rng::testing_rng();
+        test_roundtrip(0, 0, &mut rng);
+        test_roundtrip(0, 1, &mut rng);
+        test_roundtrip(1, 0, &mut rng);
+        test_roundtrip(0xffffffff, 0xffffffffffffffff, &mut rng);
+
+        for _ in 0..256 {
+            test_roundtrip(rng.gen(), rng.gen(), &mut rng);
+        }
+    }
 }

RPC protocol specification draft

@@ -130,7 +130,7 @@ If an Object is not visible in a session,
 that session cannot access it.

 Clients identify each Object within a session
-by an opaque "Object Identifier".
+by an opaque string, called an "Object Identifier".
 Each identifier may be a "handle" or a "reference".
 If a session has a _handle_ to an Object,
 Arti won't deliberately discard that Object
@@ -140,8 +140,9 @@ If a session only has a _reference_ to an Object, however,
 that Object might be closed or discarded in the background,
 and there is no need to release it.

-> For more on how this is implemented,
-> see "Representing Object Identifiers" below.
+The format of an Object Identifier string is not stable,
+and clients must not rely on it.

 ## Request and response types
@@ -496,54 +497,15 @@ Therefore a client which sends more than one request at a time
 must be prepared to buffer requests at its end,
 while concurrently reading arti's replies.

-## Representing Object Identifiers.
-
-> This section describes implementation techniques.
-> Applications should not need to care about it.
-
-Here are two ways to provide our Object visibility semantics.
-Applications should not care which one Arti uses.
-Arti may use both methods for different Objects
-in the same session.
-
-In one method,
-we use a generational index for each live session
-to hold reference-counted pointers
-to the Objects visible in the session.
-The generational index is the identifier for the Object.
-(This method is suitable for representing _handles_
-as described above.)
-
-In another method,
-when it is more convenient for Arti to access an Object
-by a global identifier `GID`,
-we use a string `GID:MAC(N_s,GID)` for the Object's Identifier,
-where `N_s` is a per-session secret nonce
-that Arti generates and does not share with the application.
-Arti verifies that the MAC is correct
-before looking up the Object by its GID.
-(This method is suitable for representing _references_ as
-described above.)
-
-Finally, in either method, we use a single fixed identifier
-(e.g. `session`)
-for the current session.
-
 ## Authentication

 When a connection is first opened,
-only authentication requests may be use
-until authentication is successful.
+only a single "connection" object is available.
+Its object ID is "`connection`".
+The client must authenticate to the connection
+in order to receive any other object IDs.

-> TODO: Perhaps it would be a good idea to say
-> that when a connection is opened,
-> there is an authentication Object (not a session Object)
-> and only _that Object_ can be used
-> until one of its responses eventually gives the application
-> a session Object?
-
-The authentication schemes are:
+The pre-authentication methods available on a connection are:

 auth:get_proto
 : Ask Arti which version of the protocol is in use.
@@ -814,10 +776,10 @@ The echo command will only work post-authentication.
 Here is an example session:

 ```
-C: { "id":3, "obj": "session", "method":"auth:authenticate", "params": {"method": "inherent:unix_path"} }
-S: {"id":3,"result":{}}
-C: { "id":7, "obj": "session", "method":"echo", "params": {"msg": "Hello World"} }
-S: {"id":7,"result":{"msg":"Hello World"}}
+>>> {"id": 3, "obj": "connection", "method": "auth:authenticate", "params": {"scheme": "inherent:unix_path"}}
+<<< {"id":3,"result":{"client":"dTewFIaZKQV1N7AUhpkpBIrIT-t5Ztb8"}}
+>>> {"id": 4, "obj": "dTewFIaZKQV1N7AUhpkpBIrIT-t5Ztb8", "method": "arti:x-echo", "params": {"msg": "Hello World"}}
+<<< {"id":4,"result":{"msg":"Hello World"}}
 ```

 Note that the server will currently close your connection