/// The packages which are currently being built, waiting for a call to
/// `finish`.
pending: HashSet<K>,
+
+ /// Topological depth of each key: the length of the longest chain of
+ /// packages which transitively depend on it (counting the key itself).
+ depth: HashMap<K, usize>,
}
/// Indication of the freshness of a package.
reverse_dep_map: HashMap::new(),
dirty: HashSet::new(),
pending: HashSet::new(),
+ depth: HashMap::new(),
}
}
&mut slot.insert((my_dependencies, value)).1
}
+ /// Indicates that all packages have been queued; calculates the internal
+ /// metadata (the `depth` of each key) needed by `dequeue`.
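+ ///
+ /// This must be called after the last `queue` and before the first
+ /// `dequeue`, since `dequeue` indexes the `depth` map built here.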
+ pub fn queue_finished(&mut self) {
+ for key in self.dep_map.keys() {
+ depth(key, &self.reverse_dep_map, &mut self.depth);
+ }
+
+ fn depth<K: Hash + Eq + Clone>(
+ key: &K,
+ map: &HashMap<K, HashSet<K>>,
+ results: &mut HashMap<K, usize>,
+ ) -> usize {
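+ // `!0` (i.e. `usize::MAX`) is a sentinel: a key mapped to it is still
+ // being computed further up the recursion, so seeing it again means the
+ // dependency graph contains a cycle.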
+ const IN_PROGRESS: usize = !0;
+
+ if let Some(&depth) = results.get(key) {
+ assert_ne!(depth, IN_PROGRESS, "cycle in DependencyQueue");
+ return depth;
+ }
+
+ results.insert(key.clone(), IN_PROGRESS);
+
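+ // A key's depth is one more than the maximum depth of its immediate
+ // entries in `map` (the reverse-dependency edges passed in from
+ // `queue_finished`); a key with no such entries gets depth 1.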
+ let depth = 1 + map.get(key)
+ .into_iter()
+ .flat_map(|it| it)
+ .map(|dep| depth(dep, map, results))
+ .max()
+ .unwrap_or(0);
+
+ *results.get_mut(key).unwrap() = depth;
+
+ depth
+ }
+ }
+
/// Dequeues a package that is ready to be built.
///
/// A package is ready to be built when it has 0 un-built dependencies. If
/// `None` is returned then no packages are ready to be built.
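+ ///
+ /// When several packages are ready at once, the one with the greatest
+ /// `depth` (the longest chain of reverse dependencies) is returned first.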
pub fn dequeue(&mut self) -> Option<(Freshness, K, V)> {
- let key = match self.dep_map.iter()
- .find(|&(_, &(ref deps, _))| deps.is_empty())
- .map(|(key, _)| key.clone()) {
+ // Look at all our crates and find everything that's ready to build (no
+ // outstanding deps). From that candidate set, select the crate with the
+ // maximum depth in the dependency graph. This should keep the CPUs busy
+ // for as long as possible by scheduling long dependency chains early in
+ // the build and letting the leaves higher in the tree fill in the
+ // cracks later.
+ //
+ // TODO: it'd be good to also throw in a heuristic of crate size here.
+ // For example, how long did this crate historically take to compile?
+ // How large is its source code? etc.
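+ //
+ // As a purely hypothetical illustration: for a chain where `a` depends
+ // on `b` and `b` depends on `c`, plus a standalone crate `d`, the depths
+ // are a: 1, b: 2, c: 3, d: 1, so the ready `c` is scheduled before the
+ // ready `d`.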
+ let next = self.dep_map.iter()
+ .filter(|&(_, &(ref deps, _))| deps.is_empty())
+ .map(|(key, _)| key.clone())
+ .max_by_key(|k| self.depth[k]);
+ let key = match next {
Some(key) => key,
None => return None
};
}
}
}
+
+#[cfg(test)]
+mod test {
+ use super::{DependencyQueue, Freshness};
+
+ #[test]
+ fn deep_first() {
+ let mut q = DependencyQueue::new();
+
+ q.queue(Freshness::Fresh, 1, (), &[]);
+ q.queue(Freshness::Fresh, 2, (), &[1]);
+ q.queue(Freshness::Fresh, 3, (), &[]);
+ q.queue(Freshness::Fresh, 4, (), &[2, 3]);
+ q.queue(Freshness::Fresh, 5, (), &[4, 3]);
+ q.queue_finished();
+
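+ // `queue_finished` computes reverse-dependency depths of 4, 3, 3, 2 and
+ // 1 for crates 1 through 5 respectively, so of the two initially ready
+ // crates (1 and 3), crate 1 is dequeued first.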
+ assert_eq!(q.dequeue(), Some((Freshness::Fresh, 1, ())));
+ assert_eq!(q.dequeue(), Some((Freshness::Fresh, 3, ())));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&3, Freshness::Fresh);
+ assert_eq!(q.dequeue(), None);
+ q.finish(&1, Freshness::Fresh);
+ assert_eq!(q.dequeue(), Some((Freshness::Fresh, 2, ())));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&2, Freshness::Fresh);
+ assert_eq!(q.dequeue(), Some((Freshness::Fresh, 4, ())));
+ assert_eq!(q.dequeue(), None);
+ q.finish(&4, Freshness::Fresh);
+ assert_eq!(q.dequeue(), Some((Freshness::Fresh, 5, ())));
+ }
+}