25 changes: 15 additions & 10 deletions src/hst/interleave.cc
@@ -89,7 +89,8 @@ Interleave::initials(std::function<void(Event)> op) const
// ∪ (Ps = {STOP}? {✔}: {}) [rule 4]

bool any_events = false;
for (const Process* p : ps_) {
for (const auto& process_and_count : ps_) {
const Process* p = process_and_count.first;
p->initials([&any_events, &op](Event initial) {
any_events = true;
if (initial == Event::tick()) {
@@ -119,9 +120,10 @@ Interleave::normal_afters(Event initial,
// basic structure: Ps' = Ps ∖ {P} ∪ {P'}. Each Ps' starts with Ps, so go
// ahead and add that to our Ps' set once.
Process::Bag ps_prime(ps_);
for (const Process* p : ps_) {
for (const auto& process_and_count : ps_) {
const Process* p = process_and_count.first;
// Set Ps' to Ps ∖ {P}
ps_prime.erase(ps_prime.find(p));
ps_prime.erase(p);
// Grab afters(P, a)
p->afters(initial, [this, &op, &ps_prime](const Process& p_prime) {
// ps_prime currently contains Ps. Add P' and remove P to produce
@@ -130,7 +132,7 @@
// Create ⫴ (Ps ∖ {P} ∪ {P'}) as a result.
op(*env_->interleave(ps_prime));
// Reset Ps' back to Ps ∖ {P}.
ps_prime.erase(ps_prime.find(&p_prime));
ps_prime.erase(&p_prime);
});
// Reset Ps' back to Ps.
ps_prime.insert(p);
@@ -153,7 +155,8 @@ Interleave::tau_afters(Event initial,
// ahead and add that to our Ps' set once.
Process::Bag ps_prime(ps_);
// Find each P ∈ Ps where ✔ ∈ initials(P).
for (const Process* p : ps_) {
for (const auto& process_and_count : ps_) {
const Process* p = process_and_count.first;
bool any_tick = false;
p->initials([&any_tick](Event initial) {
if (initial == Event::tick()) {
@@ -162,11 +165,11 @@
});
if (any_tick) {
// Create ⫴ (Ps ∖ {P} ∪ {STOP}) as a result.
ps_prime.erase(ps_prime.find(p));
ps_prime.erase(p);
ps_prime.insert(env_->stop());
op(*env_->interleave(ps_prime));
// Reset Ps' back to Ps.
ps_prime.erase(ps_prime.find(env_->stop()));
ps_prime.erase(env_->stop());
ps_prime.insert(p);
}
}
@@ -177,7 +180,8 @@ Interleave::tick_afters(Event initial,
std::function<void(const Process&)> op) const
{
// afters(⫴ {STOP}, ✔) = {STOP} [rule 4]
for (const Process* p : ps_) {
for (const auto& process_and_count : ps_) {
const Process* p = process_and_count.first;
bool any_events = false;
p->initials([&any_events](Event _) { any_events = true; });
if (any_events) {
@@ -204,8 +208,9 @@ Interleave::afters(Event initial, std::function<void(const Process&)> op) const
void
Interleave::subprocesses(std::function<void(const Process&)> op) const
{
for (const Process* process : ps_) {
op(*process);
for (const auto& process_and_count : ps_) {
const Process* p = process_and_count.first;
op(*p);
}
}

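The interleave.cc changes above all follow one pattern: iterating Process::Bag no longer yields one entry per duplicate, so each loop now receives a (process, count) pair and reads the pointer out of .first. The sketch below is a minimal, self-contained illustration of that pattern, using std::string elements and a plain std::unordered_map in place of Process::Bag; none of these names come from the PR itself.

// Standalone sketch (not part of the diff): iterating a count-map bag.
#include <iostream>
#include <string>
#include <unordered_map>

int main()
{
    // Each distinct element appears once, paired with its multiplicity.
    std::unordered_map<std::string, unsigned int> bag;
    ++bag["STOP"];
    ++bag["STOP"];
    ++bag["a -> STOP"];

    // The old unordered_multiset loop would have visited "STOP" twice; the
    // count-map loop sees it once and reads the count from the pair.
    for (const auto& element_and_count : bag) {
        const std::string& element = element_and_count.first;
        unsigned int count = element_and_count.second;
        std::cout << element << " x" << count << "\n";
    }
    return 0;
}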
70 changes: 48 additions & 22 deletions src/hst/process.cc
@@ -8,6 +8,7 @@
#include "hst/process.h"

#include <algorithm>
#include <assert.h>
#include <string>
#include <utility>
#include <vector>
@@ -44,34 +45,57 @@ NormalizedProcess::afters(Event initial,
}
}

void
Process::Bag::insert(const Process* process)
{
++counts_[process];
}

void
Process::Bag::erase(const Process* process)
{
auto it = counts_.find(process);
assert(it != counts_.end() && it->second != 0);
if (--it->second == 0) {
counts_.erase(it);
}
}

std::size_t
Process::Bag::hash() const
{
static hash_scope scope;
hst::hasher hash(scope);
std::vector<const Process*> sorted(begin(), end());
std::sort(sorted.begin(), sorted.end());
for (const Process* process : sorted) {
for (const Process* process : sorted()) {
hash.add(*process);
}
return hash.value();
}

std::ostream& operator<<(std::ostream& out, const Process::Bag& processes)
std::vector<const Process*>
Process::Bag::sorted() const
{
// We want reproducible output, so we sort the processes in the set before
// rendering them into the stream. We the process's index to print out the
// processes in the order that they were defined.
std::vector<const Process*> sorted_processes(processes.begin(),
processes.end());
std::sort(sorted_processes.begin(), sorted_processes.end(),
std::vector<const Process*> sorted;
for (const auto& process_and_count : *this) {
const Process* process = process_and_count.first;
size_type count = process_and_count.second;
sorted.insert(sorted.end(), count, process);
}
std::sort(sorted.begin(), sorted.end(),
[](const Process* p1, const Process* p2) {
return p1->index() < p2->index();
});
return sorted;
}

std::ostream& operator<<(std::ostream& out, const Process::Bag& processes)
{
// We want reproducible output, so we sort the processes in the set before
// rendering them into the stream. We use the process's index as the sort
// key to print out the processes in the order that they were defined.
bool first = true;
out << "{";
for (const Process* process : sorted_processes) {
for (const Process* process : processes.sorted()) {
if (first) {
first = false;
} else {
@@ -87,14 +111,23 @@ Process::Set::hash() const
{
static hash_scope scope;
hst::hasher hash(scope);
std::vector<const Process*> sorted(begin(), end());
std::sort(sorted.begin(), sorted.end());
for (const Process* process : sorted) {
for (const Process* process : sorted()) {
hash.add(*process);
}
return hash.value();
}

std::vector<const Process*>
Process::Set::sorted() const
{
std::vector<const Process*> sorted(begin(), end());
std::sort(sorted.begin(), sorted.end(),
[](const Process* p1, const Process* p2) {
return p1->index() < p2->index();
});
return sorted;
}

void
Process::Set::tau_close()
{
@@ -118,16 +151,9 @@ std::ostream& operator<<(std::ostream& out, const Process::Set& processes)
// We want reproducible output, so we sort the processes in the set before
// rendering them into the stream. We use the process's index to print out the
// processes in the order that they were defined.
std::vector<const Process*> sorted_processes(processes.begin(),
processes.end());
std::sort(sorted_processes.begin(), sorted_processes.end(),
[](const Process* p1, const Process* p2) {
return p1->index() < p2->index();
});

bool first = true;
out << "{";
for (const Process* process : sorted_processes) {
for (const Process* process : processes.sorted()) {
if (first) {
first = false;
} else {
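The new Bag::insert and Bag::erase above maintain a per-process count, and sorted() expands those counts back into duplicate entries before ordering them. The sketch below mirrors that behaviour with a bag of ints so it compiles on its own; IntBag and its values are invented for this example and are not part of the library.

// Standalone sketch (not part of the diff): a minimal count-map bag of ints
// mirroring the semantics of Process::Bag::insert/erase/sorted.
#include <algorithm>
#include <assert.h>
#include <unordered_map>
#include <vector>

struct IntBag {
    std::unordered_map<int, unsigned int> counts;

    void insert(int value) { ++counts[value]; }

    // Removes exactly ONE copy; the key disappears once its count hits zero.
    void erase(int value)
    {
        auto it = counts.find(value);
        assert(it != counts.end() && it->second != 0);
        if (--it->second == 0) {
            counts.erase(it);
        }
    }

    // Expands counts back into duplicates and sorts, like Bag::sorted().
    std::vector<int> sorted() const
    {
        std::vector<int> result;
        for (const auto& value_and_count : counts) {
            result.insert(result.end(), value_and_count.second,
                          value_and_count.first);
        }
        std::sort(result.begin(), result.end());
        return result;
    }
};

int main()
{
    IntBag bag;
    bag.insert(7);
    bag.insert(7);
    bag.insert(3);
    bag.erase(7);  // only one copy of 7 is removed
    assert(bag.sorted() == (std::vector<int>{3, 7}));
    return 0;
}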
52 changes: 43 additions & 9 deletions src/hst/process.h
@@ -11,9 +11,11 @@
#include <algorithm>
#include <assert.h>
#include <functional>
#include <initializer_list>
#include <memory>
#include <ostream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

@@ -123,14 +125,48 @@ class NormalizedProcess : public Process {
void bfs(std::function<void(const NormalizedProcess&)> op) const;
};

class Process::Bag : public std::unordered_multiset<const Process*> {
class Process::Bag {
private:
using Parent = std::unordered_multiset<const Process*>;
using size_type = unsigned int;
using CountMap = std::unordered_map<const Process*, size_type>;

public:
using Parent::unordered_multiset;
using const_iterator = CountMap::const_iterator;
using iterator = CountMap::iterator;

Bag() = default;
Bag(const Bag& other) = default;
explicit Bag(std::initializer_list<const Process*> elements)
{
for (const Process* process : elements) {
insert(process);
}
}

void insert(const Process* process);

// Unlike the STL multiset classes, this only removes ONE copy of process.
void erase(const Process* process);

std::size_t hash() const;

// Returns a vector containing a sorted copy of the bag. Duplicate elements
// will be added to the vector multiple times.
std::vector<const Process*> sorted() const;

bool operator==(const Bag& other) const { return counts_ == other.counts_; }
bool operator!=(const Bag& other) const { return counts_ != other.counts_; }

// Returns an iterator of (Process, count) pairs
iterator begin() { return counts_.begin(); }
iterator end() { return counts_.end(); }
const_iterator begin() const { return counts_.begin(); }
const_iterator end() const { return counts_.end(); }
const_iterator cbegin() const { return counts_.begin(); }
const_iterator cend() const { return counts_.end(); }

private:
CountMap counts_;
};

std::ostream& operator<<(std::ostream& out, const Process::Bag& processes);
@@ -144,6 +180,9 @@ class Process::Set : public std::unordered_set<const Process*> {

std::size_t hash() const;

// Returns a vector containing a sorted copy of the set.
std::vector<const Process*> sorted() const;

// Updates this set of processes to be τ-closed. (That is, we add any
// additional processes you can reach by following τ one or more times.)
void tau_close();
Expand Down Expand Up @@ -271,12 +310,7 @@ Process::print_subprocesses(std::ostream& out, const T& processes,
// We want reproducible output, so we sort the processes in the set before
// rendering them into the stream. We use the process's index to print out the
// processes in the order that they were defined.
std::vector<const Process*> sorted_processes(processes.begin(),
processes.end());
std::sort(sorted_processes.begin(), sorted_processes.end(),
[](const Process* p1, const Process* p2) {
return p1->index() < p2->index();
});
std::vector<const Process*> sorted_processes = processes.sorted();

if (sorted_processes.size() == 2) {
const Process* lhs = sorted_processes[0];
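Both Process::Bag::sorted() and Process::Set::sorted() exist so that operator<< and print_subprocesses can emit reproducible output from unordered containers. The self-contained sketch below shows the same idea for a std::unordered_set of strings, sorting lexicographically rather than by process index; the sorted() and print() helpers here are stand-ins written for this example, not functions from the hst library.

// Standalone sketch (not part of the diff): reproducible printing of an
// unordered container via a sorted() helper.
#include <algorithm>
#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

static std::vector<std::string>
sorted(const std::unordered_set<std::string>& set)
{
    // Copy into a vector and sort to get a deterministic order.
    std::vector<std::string> result(set.begin(), set.end());
    std::sort(result.begin(), result.end());
    return result;
}

static void print(std::ostream& out, const std::unordered_set<std::string>& set)
{
    bool first = true;
    out << "{";
    for (const std::string& element : sorted(set)) {
        if (first) {
            first = false;
        } else {
            out << ", ";
        }
        out << element;
    }
    out << "}";
}

int main()
{
    std::unordered_set<std::string> processes = {"STOP", "SKIP", "a -> STOP"};
    print(std::cout, processes);  // always prints in the same order
    std::cout << "\n";
    return 0;
}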