agentic-team-templates 0.9.2 → 0.11.0

@@ -0,0 +1,204 @@
1
+ # Rust Ownership and Borrowing
2
+
3
+ Ownership is Rust's defining feature. It's not a constraint — it's a design tool that eliminates entire classes of bugs at compile time. Every memory safety guarantee flows from these rules.
4
+
5
+ ## The Three Rules
6
+
7
+ 1. Each value has exactly one owner
8
+ 2. When the owner goes out of scope, the value is dropped
9
+ 3. You can have either one mutable reference OR any number of shared references — never both
10
+
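+ A quick way to see rules 1 and 2 in action is a type with a `Drop` impl. This is a minimal sketch (the `Token` type exists only for illustration):
+ 
+ ```rust
+ struct Token(&'static str);
+ 
+ impl Drop for Token {
+     fn drop(&mut self) {
+         // Runs exactly once, when the value's single owner goes out of scope (rule 2)
+         println!("dropping {}", self.0);
+     }
+ }
+ 
+ fn main() {
+     let outer = Token("outer");
+     {
+         let inner = Token("inner");
+         let moved = inner; // ownership moves; `inner` is no longer the owner (rule 1)
+         println!("{} is still alive", moved.0);
+     } // `moved` goes out of scope here: prints "dropping inner"
+     println!("{} is still alive", outer.0);
+ } // prints "dropping outer"
+ ```
+ 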
11
+ ## Ownership Patterns
12
+
13
+ ### Move Semantics
14
+
15
+ ```rust
16
+ // Non-Copy values are moved by default — this is the foundation
17
+ let s = String::from("hello");
18
+ let t = s; // s is MOVED into t
19
+ // println!("{s}"); // Compile error: s has been moved
20
+
21
+ // Functions take ownership unless they borrow
22
+ fn consume(s: String) { /* s is dropped at end of function */ }
23
+ fn borrow(s: &str) { /* s is borrowed — caller keeps ownership */ }
24
+
25
+ // Return values transfer ownership back
26
+ fn create() -> String {
27
+ String::from("created") // Ownership moves to caller
28
+ }
29
+ ```
30
+
31
+ ### Borrowing
32
+
33
+ ```rust
34
+ // Shared references: &T — read-only, multiple allowed
35
+ fn len(s: &str) -> usize {
36
+ s.len()
37
+ }
38
+
39
+ // Mutable references: &mut T — exclusive access
40
+ fn push_greeting(s: &mut String) {
41
+ s.push_str(", world!");
42
+ }
43
+
44
+ // The borrow checker enforces at compile time:
45
+ let mut s = String::from("hello");
46
+ let r1 = &s; // Fine: shared borrow
47
+ let r2 = &s; // Fine: multiple shared borrows
48
+ println!("{r1} {r2}");
49
+ // r1 and r2 are not used after this point, so their borrows end here (non-lexical lifetimes)
50
+ let r3 = &mut s; // Fine: no shared borrows are active
51
+ r3.push_str("!");
52
+ ```
53
+
54
+ ### Lifetimes
55
+
56
+ ```rust
57
+ // Lifetimes tell the compiler how long references are valid
58
+ // Most of the time, elision handles this automatically
59
+
60
+ // When the compiler needs help:
61
+ fn longest<'a>(x: &'a str, y: &'a str) -> &'a str {
62
+ if x.len() > y.len() { x } else { y }
63
+ }
64
+ // Meaning: the returned reference is only guaranteed to be valid for
65
+ // the shorter of the lifetimes of x and y
66
+
67
+ // Structs that hold references need lifetime annotations
68
+ struct Excerpt<'a> {
69
+ text: &'a str,
70
+ }
71
+
72
+ // If your struct has many lifetime parameters, consider
73
+ // owning the data instead — complexity isn't always worth it
74
+ struct Article {
75
+ title: String, // Owned — simpler, no lifetime tracking
76
+ body: String,
77
+ }
78
+ ```
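+ 
+ To see what that constraint buys you, here is a minimal sketch reusing `longest` from above. Uncommenting the last line extends the use of `result` past the end of `short_lived`'s scope, and the compiler rejects it:
+ 
+ ```rust
+ fn longest<'a>(x: &'a str, y: &'a str) -> &'a str {
+     if x.len() > y.len() { x } else { y }
+ }
+ 
+ fn main() {
+     let long_lived = String::from("long string is long");
+     let result;
+     {
+         let short_lived = String::from("xyz");
+         result = longest(long_lived.as_str(), short_lived.as_str());
+         println!("longest inside the block: {result}");
+     }
+     // println!("{result}"); // Error: `short_lived` does not live long enough
+ }
+ ```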
79
+
80
+ ### Interior Mutability
81
+
82
+ ```rust
83
+ // When you need mutation behind a shared reference
84
+
85
+ // Cell<T> — for Copy types, single-threaded
86
+ use std::cell::Cell;
87
+ let counter = Cell::new(0);
88
+ counter.set(counter.get() + 1); // Mutation through shared reference
89
+
90
+ // RefCell<T> — runtime borrow checking, single-threaded
91
+ use std::cell::RefCell;
92
+ let data = RefCell::new(vec![1, 2, 3]);
93
+ data.borrow_mut().push(4); // Panics if already borrowed
94
+
95
+ // Mutex<T> / RwLock<T> — thread-safe interior mutability
96
+ use std::sync::Mutex;
97
+ let shared = Mutex::new(Vec::new());
98
+ shared.lock().unwrap().push(42);
99
+
100
+ // Choose the right tool:
101
+ // Cell — Copy types, no overhead, single-threaded
102
+ // RefCell — any type, runtime cost, single-threaded, panics on violation
103
+ // Mutex — any type, blocking, multi-threaded
104
+ // RwLock — any type, read-heavy workloads, multi-threaded
105
+ // Atomic* — primitives, lock-free, multi-threaded
106
+ ```
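+ 
+ The trade-off with `RefCell` is that the exclusive-access rule is checked at runtime instead of compile time. A minimal sketch of what that looks like:
+ 
+ ```rust
+ use std::cell::RefCell;
+ 
+ fn main() {
+     let data = RefCell::new(vec![1, 2, 3]);
+ 
+     let first = data.borrow(); // shared borrow, tracked at runtime
+     // let mut writer = data.borrow_mut(); // would panic here: already borrowed
+     println!("first element: {}", first[0]);
+     drop(first); // explicitly release the shared borrow
+ 
+     data.borrow_mut().push(4); // fine: no other borrows are active
+     assert_eq!(data.borrow().len(), 4);
+ }
+ ```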
107
+
108
+ ## Smart Pointers
109
+
110
+ ```rust
111
+ // Box<T> — heap allocation with single ownership
112
+ let boxed: Box<dyn Error> = Box::new(MyError::new("failed"));
113
+ // Use for: trait objects, recursive types, large values you want on heap
114
+
115
+ // Rc<T> — reference-counted, single-threaded shared ownership
116
+ use std::rc::Rc;
117
+ let shared = Rc::new(ExpensiveData::new());
118
+ let clone = Rc::clone(&shared); // Increments count, doesn't clone data
119
+ // Use for: graph structures, multiple owners in single-threaded code
120
+
121
+ // Arc<T> — atomic reference-counted, thread-safe shared ownership
122
+ use std::sync::Arc;
123
+ let shared = Arc::new(Config::load());
124
+ let handle = {
125
+ let shared = Arc::clone(&shared);
126
+ thread::spawn(move || process(&shared))
127
+ };
128
+ // Use for: shared immutable data across threads
129
+
130
+ // Cow<T> — clone-on-write
131
+ use std::borrow::Cow;
132
+ fn process(input: &str) -> Cow<'_, str> {
133
+ if input.contains("bad") {
134
+ Cow::Owned(input.replace("bad", "good"))
135
+ } else {
136
+ Cow::Borrowed(input) // No allocation when not needed
137
+ }
138
+ }
139
+ // Use for: avoiding allocation when mutation is conditional
140
+ ```
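+ 
+ These pieces compose: `Arc` for shared ownership across threads plus `Mutex` for synchronized mutation is the usual shape for shared mutable state. A minimal sketch:
+ 
+ ```rust
+ use std::sync::{Arc, Mutex};
+ use std::thread;
+ 
+ fn main() {
+     let counter = Arc::new(Mutex::new(0u64));
+ 
+     let handles: Vec<_> = (0..4)
+         .map(|_| {
+             let counter = Arc::clone(&counter); // bumps the refcount, not the data
+             thread::spawn(move || {
+                 for _ in 0..1_000 {
+                     *counter.lock().unwrap() += 1; // lock released at end of statement
+                 }
+             })
+         })
+         .collect();
+ 
+     for handle in handles {
+         handle.join().unwrap();
+     }
+ 
+     assert_eq!(*counter.lock().unwrap(), 4_000);
+ }
+ ```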
141
+
142
+ ## Common Borrow Checker Patterns
143
+
144
+ ### Splitting Borrows
145
+
146
+ ```rust
147
+ // Within one function body, the borrow checker tracks borrows per field, not per struct
148
+ struct State {
149
+ buffer: Vec<u8>,
150
+ index: usize,
151
+ }
152
+
153
+ impl State {
154
+ fn process(&mut self) {
155
+ // This works because buffer and index are separate fields
156
+ let buf = &self.buffer[self.index..];
157
+ self.index += buf.len();
158
+ }
159
+ }
160
+ ```
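+ 
+ That tracking stops at function boundaries: a helper method taking `&mut self` borrows the whole struct. One workaround is to borrow the fields first and pass them separately; a sketch under that assumption (the `advance` helper is made up, and `State` is repeated so the snippet stands alone):
+ 
+ ```rust
+ struct State {
+     buffer: Vec<u8>,
+     index: usize,
+ }
+ 
+ // A free function over exactly the fields it needs, instead of a &mut self method
+ fn advance(index: &mut usize, buf: &[u8]) {
+     *index += buf.len();
+ }
+ 
+ impl State {
+     fn process(&mut self) {
+         // Destructure &mut self into disjoint borrows of its fields...
+         let State { buffer, index } = self;
+         let buf = &buffer[*index..];
+         // ...then both can be used at the same time
+         advance(index, buf);
+     }
+ }
+ 
+ fn main() {
+     let mut state = State { buffer: vec![1, 2, 3], index: 0 };
+     state.process();
+     assert_eq!(state.index, 3);
+ }
+ ```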
161
+
162
+ ### Temporary Lifetimes
163
+
164
+ ```rust
165
+ // Bad: temporary dropped while borrowed
166
+ // let r = String::from("temp").as_str();
+ // println!("{r}"); // Error: temporary value dropped while borrowed
167
+
168
+ // Good: bind to a variable to extend the lifetime
169
+ let s = String::from("temp");
170
+ let r = s.as_str(); // valid for as long as s is in scope
171
+ ```
172
+
173
+ ### Entry API for Maps
174
+
175
+ ```rust
176
+ use std::collections::HashMap;
177
+
178
+ let mut map = HashMap::new();
179
+
180
+ // Entry API avoids double lookup and borrow conflicts
181
+ map.entry("key")
182
+ .and_modify(|v| *v += 1)
183
+ .or_insert(0);
184
+ ```
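+ 
+ The same idea gives the usual counter idiom: `or_insert` returns a mutable reference you can update in place. A small sketch (the `word_counts` helper is just for illustration):
+ 
+ ```rust
+ use std::collections::HashMap;
+ 
+ fn word_counts<'a>(words: &[&'a str]) -> HashMap<&'a str, u32> {
+     let mut counts = HashMap::new();
+     for &word in words {
+         // One lookup: insert 0 if the key is missing, then bump the value in place
+         *counts.entry(word).or_insert(0) += 1;
+     }
+     counts
+ }
+ 
+ fn main() {
+     let counts = word_counts(&["a", "b", "a"]);
+     assert_eq!(counts["a"], 2);
+     assert_eq!(counts["b"], 1);
+ }
+ ```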
185
+
186
+ ## Anti-Patterns
187
+
188
+ ```rust
189
+ // Never: Clone to satisfy the borrow checker without understanding why
190
+ let data = expensive_data.clone(); // Are you sure this is necessary?
191
+ // If you're cloning to fix a borrow error, first ask: can I restructure the code?
192
+
193
+ // Never: Leaking memory to avoid lifetimes
194
+ let leaked: &'static str = Box::leak(Box::new(String::from("forever")));
195
+ // This is almost never the right solution
196
+
197
+ // Never: Rc<RefCell<T>> as a default — it's a code smell
198
+ // If everything is Rc<RefCell<T>>, you've recreated garbage-collected mutable state
199
+ // Restructure to use ownership and borrowing properly first
200
+
201
+ // Never: Ignoring the borrow checker by reaching for unsafe
202
+ // If the borrow checker rejects your code, the design usually needs to change
203
+ // unsafe doesn't fix design problems — it hides them
204
+ ```
@@ -0,0 +1,256 @@
1
+ # Rust Performance and Unsafe
2
+
3
+ Rust's zero-cost abstractions mean you rarely need to choose between ergonomics and performance. When you do need to go lower, `unsafe` is the mechanism — and it comes with strict obligations.
4
+
5
+ ## Performance
6
+
7
+ ### Measure First
8
+
9
+ ```bash
10
+ # Profile before optimizing
11
+ cargo bench # Run benchmarks
12
+ cargo flamegraph # Generate flamegraph (needs cargo-flamegraph)
13
+ cargo instruments -t "Time Profiler" # macOS Instruments
14
+
15
+ # Check binary size
16
+ cargo bloat --release --crates # Which crates contribute to binary size
17
+ cargo bloat --release --filter "my_" # Your functions by size
18
+
19
+ # Check compile times
20
+ cargo build --timings # HTML report of compile times per crate
21
+ ```
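+ 
+ On stable Rust, `cargo bench` is usually backed by the `criterion` crate. A minimal sketch, assuming a `criterion` dev-dependency, a `[[bench]]` target with `harness = false`, and a hypothetical `my_crate::parse` function as the code under test:
+ 
+ ```rust
+ // benches/parse.rs
+ use criterion::{criterion_group, criterion_main, Criterion};
+ use std::hint::black_box;
+ 
+ fn bench_parse(c: &mut Criterion) {
+     let input = "key=value;other=thing";
+     // black_box keeps the optimizer from deleting the work being measured
+     c.bench_function("parse", |b| b.iter(|| my_crate::parse(black_box(input))));
+ }
+ 
+ criterion_group!(benches, bench_parse);
+ criterion_main!(benches);
+ ```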
22
+
23
+ ### Allocation Patterns
24
+
25
+ ```rust
26
+ // Preallocate collections when size is known
27
+ let mut results = Vec::with_capacity(items.len());
28
+ for item in items {
29
+ results.push(process(item));
30
+ }
31
+
32
+ // Use iterators — they often avoid allocation entirely
33
+ let sum: i64 = items.iter()
34
+ .filter(|x| x.is_valid())
35
+ .map(|x| x.value())
36
+ .sum();
37
+
38
+ // Avoid unnecessary clones
39
+ fn process(data: &str) -> Result<Output> { ... } // Borrow when possible
40
+ fn consume(data: String) -> Result<Output> { ... } // Take ownership when needed
41
+
42
+ // Cow for conditional ownership
43
+ use std::borrow::Cow;
44
+ fn normalize(input: &str) -> Cow<'_, str> {
45
+ if input.contains('\t') {
46
+ Cow::Owned(input.replace('\t', " "))
47
+ } else {
48
+ Cow::Borrowed(input) // Zero allocation when no tabs
49
+ }
50
+ }
51
+
52
+ // SmallVec for small, stack-allocated collections
53
+ use smallvec::SmallVec;
54
+ let mut tags: SmallVec<[Tag; 4]> = SmallVec::new();
55
+ // Up to 4 tags on the stack, spills to heap only if exceeded
56
+ ```
57
+
58
+ ### String Performance
59
+
60
+ ```rust
61
+ // String concatenation: use a builder pattern
62
+ use std::fmt::Write;
63
+ let mut output = String::with_capacity(estimated_size);
64
+ for item in items {
65
+ writeln!(output, "{}: {}", item.key, item.value).unwrap();
66
+ }
67
+
68
+ // For byte-level work, operate on &[u8] instead of &str
69
+ // Converting to str requires UTF-8 validation — skip it when you can
70
+
71
+ // Interning for repeated strings
72
+ // Use string interning crates (lasso, string_cache) for heavy deduplication
73
+ ```
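+ 
+ As a small illustration of the `&[u8]` point, counting records by scanning bytes directly needs no UTF-8 validation and no intermediate `String`. A sketch with a made-up `count_lines` helper, assuming the bytes came straight from a file or socket:
+ 
+ ```rust
+ // Works on raw bytes: no str conversion, no allocation
+ fn count_lines(data: &[u8]) -> usize {
+     data.iter().filter(|&&b| b == b'\n').count()
+ }
+ 
+ fn main() {
+     let raw = b"alpha\nbeta\ngamma\n";
+     assert_eq!(count_lines(raw), 3);
+ }
+ ```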
74
+
75
+ ### Iterator Optimization
76
+
77
+ ```rust
78
+ // Iterators typically compile to the same code as manual loops; use them freely
79
+
80
+ // Collect into specific types
81
+ let map: HashMap<_, _> = pairs.into_iter().collect();
82
+ let set: HashSet<_> = items.into_iter().collect();
83
+
84
+ // Avoid collect() when you don't need a collection
85
+ // Bad: allocates a Vec just to iterate it
86
+ let filtered: Vec<_> = items.iter().filter(|x| x.active).collect();
87
+ for item in &filtered { ... }
88
+
89
+ // Good: iterate directly
90
+ for item in items.iter().filter(|x| x.active) { ... }
91
+
92
+ // chunks/windows for batch processing
93
+ for chunk in data.chunks(1024) {
94
+ process_batch(chunk)?;
95
+ }
96
+ ```
97
+
98
+ ### Data Layout
99
+
100
+ ```rust
101
+ // Field ordering affects struct size due to padding
102
+ // Bad: 24 bytes under #[repr(C)] (padding after each u8 field)
103
+ #[repr(C)]
+ struct Padded {
104
+ a: u8, // 1 byte + 7 padding
105
+ b: u64, // 8 bytes
106
+ c: u8, // 1 byte + 7 padding
107
+ }
108
+
109
+ // Good: 16 bytes under #[repr(C)] (reordered to minimize padding)
110
+ #[repr(C)]
+ struct Compact {
111
+ b: u64, // 8 bytes
112
+ a: u8, // 1 byte
113
+ c: u8, // 1 byte + 6 padding
114
+ }
115
+
116
+ // repr(C) for FFI — disables Rust's field reordering
117
+ #[repr(C)]
118
+ struct FfiStruct { ... }
119
+
120
+ // The compiler may reorder fields automatically in default repr(Rust),
121
+ // but being explicit about layout helps readability and FFI correctness
122
+ ```
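+ 
+ The sizes are easy to verify with `std::mem::size_of`; a small sketch repeating the two structs so it compiles on its own:
+ 
+ ```rust
+ use std::mem::size_of;
+ 
+ #[allow(dead_code)]
+ #[repr(C)]
+ struct Padded {
+     a: u8,
+     b: u64,
+     c: u8,
+ }
+ 
+ #[allow(dead_code)]
+ #[repr(C)]
+ struct Compact {
+     b: u64,
+     a: u8,
+     c: u8,
+ }
+ 
+ fn main() {
+     // With a fixed #[repr(C)] layout, field order determines the padding
+     assert_eq!(size_of::<Padded>(), 24);
+     assert_eq!(size_of::<Compact>(), 16);
+ }
+ ```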
123
+
124
+ ## Unsafe
125
+
126
+ ### The Contract
127
+
128
+ Every `unsafe` block is a proof obligation. You are telling the compiler: "I have verified that the safety invariants hold here, and I accept responsibility."
129
+
130
+ ```rust
131
+ // ALWAYS document the safety invariant
132
+ // SAFETY: We've verified that `index` is within bounds via the
133
+ // length check on line 42, and the slice is valid for the lifetime
134
+ // of this function.
135
+ unsafe {
136
+ *ptr.add(index) = value;
137
+ }
138
+ ```
139
+
140
+ ### Valid Uses of Unsafe
141
+
142
+ ```rust
143
+ // 1. Calling unsafe functions (FFI)
144
+ extern "C" {
145
+ fn external_function(ptr: *const u8, len: usize) -> i32;
146
+ }
147
+
148
+ pub fn safe_wrapper(data: &[u8]) -> Result<i32> {
149
+ // SAFETY: data.as_ptr() is valid for data.len() bytes,
150
+ // and the external function only reads from the pointer.
151
+ let result = unsafe { external_function(data.as_ptr(), data.len()) };
152
+ if result < 0 {
153
+ Err(Error::ExternalFailure(result))
154
+ } else {
155
+ Ok(result)
156
+ }
157
+ }
158
+
159
+ // 2. Implementing unsafe traits
160
+ // SAFETY: MyType is Send because its internal raw pointer
161
+ // is only accessed from the thread that owns MyType.
162
+ // The pointer is never shared or aliased.
163
+ unsafe impl Send for MyType {}
164
+
165
+ // 3. Accessing mutable statics
166
+ static mut COUNTER: u64 = 0;
167
+ // SAFETY: This is only called from a single thread during initialization.
168
+ unsafe { COUNTER += 1; }
169
+ // Prefer AtomicU64 or OnceLock instead — this is almost always avoidable.
170
+
171
+ // 4. Unchecked operations for performance (after profiling proves it matters)
172
+ // SAFETY: We've validated that all bytes in `data` are valid UTF-8
173
+ // in the validation pass on line 30.
174
+ let s = unsafe { std::str::from_utf8_unchecked(data) };
175
+ ```
176
+
177
+ ### Minimizing Unsafe Surface
178
+
179
+ ```rust
180
+ use std::alloc::{self, Layout};
+ 
+ // Wrap unsafe in safe abstractions with narrow interfaces
181
+ pub struct AlignedBuffer {
182
+ ptr: *mut u8,
183
+ len: usize,
184
+ cap: usize,
+ layout: Layout,
185
+ }
186
+
187
+ impl AlignedBuffer {
188
+ pub fn new(capacity: usize, alignment: usize) -> Self {
189
+ assert!(capacity > 0, "capacity must be non-zero");
+ let layout = Layout::from_size_align(capacity, alignment).unwrap();
190
+ // SAFETY: layout has a non-zero size (asserted above) and a power-of-two alignment (validated by from_size_align)
191
+ let ptr = unsafe { alloc::alloc(layout) };
192
+ if ptr.is_null() {
193
+ alloc::handle_alloc_error(layout);
194
+ }
195
+ Self { ptr, len: 0, cap: capacity, layout }
196
+ }
197
+
198
+ // Public API is fully safe — unsafe is encapsulated
199
+ pub fn push(&mut self, byte: u8) {
200
+ assert!(self.len < self.cap, "buffer full");
201
+ // SAFETY: We've verified len < cap, so ptr.add(len) is within allocation
202
+ unsafe { self.ptr.add(self.len).write(byte); }
203
+ self.len += 1;
204
+ }
205
+
206
+ pub fn as_slice(&self) -> &[u8] {
207
+ // SAFETY: ptr is valid for len bytes, all initialized by push()
208
+ unsafe { std::slice::from_raw_parts(self.ptr, self.len) }
209
+ }
210
+ }
211
+
212
+ impl Drop for AlignedBuffer {
213
+ fn drop(&mut self) {
214
+ let layout = self.layout; // stored by new(); the alignment cannot be re-derived from cap alone
215
+ // SAFETY: ptr was allocated with this layout in new()
216
+ unsafe { alloc::dealloc(self.ptr, layout); }
217
+ }
218
+ }
219
+ ```
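+ 
+ From the caller's side the type is ordinary safe Rust, which is the point. A usage sketch, assuming the `AlignedBuffer` defined above:
+ 
+ ```rust
+ fn main() {
+     let mut buf = AlignedBuffer::new(64, 16);
+     for &byte in b"hello" {
+         buf.push(byte);
+     }
+     // No unsafe anywhere in the calling code
+     assert_eq!(buf.as_slice(), b"hello");
+ }
+ ```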
220
+
221
+ ### Miri
222
+
223
+ ```bash
224
+ # Miri detects undefined behavior in unsafe code
225
+ cargo +nightly miri test
226
+
227
+ # Miri catches:
228
+ # - Out-of-bounds memory access
229
+ # - Use-after-free
230
+ # - Invalid use of uninitialized data
231
+ # - Violations of aliasing rules (Stacked Borrows)
232
+ # - Data races
233
+
234
+ # Run Miri in CI for any crate with unsafe code
235
+ ```
236
+
237
+ ## Anti-Patterns
238
+
239
+ ```rust
240
+ // Never: unsafe to "shut up the borrow checker"
241
+ // If the borrow checker rejects it, there's a reason. Redesign.
242
+
243
+ // Never: unsafe without a SAFETY comment
244
+ unsafe { ptr::write(dst, src) } // WHY is this safe? Document it.
245
+
246
+ // Never: transmute as a first resort
247
+ let x: u32 = unsafe { std::mem::transmute(my_float) };
248
+ // Use to_bits() / from_bits() instead — safe, clear, correct
249
+
250
+ // Never: Assuming layout without repr(C)
251
+ // Rust's default repr can reorder fields — don't assume memory layout
252
+
253
+ // Never: Dereferencing raw pointers without proving validity
254
+ // A raw pointer might be null, dangling, or misaligned.
255
+ // Prove all three are impossible before dereferencing.
256
+ ```