aboutsummaryrefslogtreecommitdiffstats
path: root/common/nextpnr.h
diff options
context:
space:
mode:
Diffstat (limited to 'common/nextpnr.h')
-rw-r--r--common/nextpnr.h170
1 files changed, 138 insertions, 32 deletions
diff --git a/common/nextpnr.h b/common/nextpnr.h
index efcab9fc..c3fb913c 100644
--- a/common/nextpnr.h
+++ b/common/nextpnr.h
@@ -26,6 +26,7 @@
#include <unordered_map>
#include <unordered_set>
#include <vector>
+#include <boost/thread/shared_mutex.hpp>
#ifndef NEXTPNR_H
#define NEXTPNR_H
@@ -248,21 +249,37 @@ struct UIUpdatesRequired
std::unordered_set<GroupId> groupUIReload;
};
-struct BaseCtx
+class ReadContext;
+class MutateContext;
+class BaseReadCtx;
+class BaseMutateCtx;
+
+// Data that every architecture object should contain.
+class BaseCtx
{
- // --------------------------------------------------------------
+ friend class ReadContext;
+ friend class MutateContext;
+ friend class BaseReadCtx;
+ friend class BaseMutateCtx;
+private:
+ mutable boost::shared_mutex mtx_;
- mutable std::unordered_map<std::string, int> *idstring_str_to_idx;
- mutable std::vector<const std::string *> *idstring_idx_to_str;
+ bool allUiReload = false;
+ bool frameUiReload = false;
+ std::unordered_set<BelId> belUiReload;
+ std::unordered_set<WireId> wireUiReload;
+ std::unordered_set<PipId> pipUiReload;
+ std::unordered_set<GroupId> groupUiReload;
+public:
IdString id(const std::string &s) const { return IdString(this, s); }
-
IdString id(const char *s) const { return IdString(this, s); }
- // --------------------------------------------------------------
-
+ // TODO(q3k): These need to be made private.
std::unordered_map<IdString, std::unique_ptr<NetInfo>> nets;
std::unordered_map<IdString, std::unique_ptr<CellInfo>> cells;
+ mutable std::unordered_map<std::string, int> *idstring_str_to_idx;
+ mutable std::vector<const std::string *> *idstring_idx_to_str;
BaseCtx()
{
@@ -286,41 +303,83 @@ struct BaseCtx
// --------------------------------------------------------------
- bool allUiReload = false;
- bool frameUiReload = false;
- std::unordered_set<BelId> belUiReload;
- std::unordered_set<WireId> wireUiReload;
- std::unordered_set<PipId> pipUiReload;
- std::unordered_set<GroupId> groupUiReload;
+ // Get a readwrite proxy to arch - this will keep a readwrite lock on the
+ // entire architecture until the proxy object goes out of scope.
+ MutateContext rwproxy(void);
+ // Get a read-only proxy to arch - this will keep a read lock on the
+ // entire architecture until the proxy object goes out of scope. Other read
+ // entire architecture until the proxy object goes out of scope. Other read
+ // locks can be taken while this one still exists. I.e., the UI can draw
+ // elements while the PnR is doing a RO operation.
+ ReadContext rproxy(void) const;
+
+};
- void refreshUi() { allUiReload = true; }
+// State-accessing read-only methods that every architecture object should
+// contain.
+class BaseReadCtx
+{
+protected:
+ const BaseCtx *base_;
+public:
+ BaseReadCtx(const BaseCtx *base) : base_(base) {}
+};
+
+// State-accessing read/write methods that every architecture object should
+// contain.
+class BaseMutateCtx
+{
+protected:
+ BaseCtx *base_;
+
+public:
+ BaseMutateCtx(BaseCtx *base) : base_(base) {}
+
+ void refreshUi(void)
+ {
+ base_->allUiReload = true;
+ }
- void refreshUiFrame() { frameUiReload = true; }
+ void refreshUiFrame(void)
+ {
+ base_->frameUiReload = true;
+ }
- void refreshUiBel(BelId bel) { belUiReload.insert(bel); }
+ void refreshUiBel(BelId bel)
+ {
+ base_->belUiReload.insert(bel);
+ }
- void refreshUiWire(WireId wire) { wireUiReload.insert(wire); }
+ void refreshUiWire(WireId wire)
+ {
+ base_->wireUiReload.insert(wire);
+ }
- void refreshUiPip(PipId pip) { pipUiReload.insert(pip); }
+ void refreshUiPip(PipId pip)
+ {
+ base_->pipUiReload.insert(pip);
+ }
- void refreshUiGroup(GroupId group) { groupUiReload.insert(group); }
+ void refreshUiGroup(GroupId group)
+ {
+ base_->groupUiReload.insert(group);
+ }
UIUpdatesRequired getUIUpdatesRequired(void)
{
UIUpdatesRequired req;
- req.allUIReload = allUiReload;
- req.frameUIReload = frameUiReload;
- req.belUIReload = belUiReload;
- req.wireUIReload = wireUiReload;
- req.pipUIReload = pipUiReload;
- req.groupUIReload = groupUiReload;
-
- allUiReload = false;
- frameUiReload = false;
- belUiReload.clear();
- wireUiReload.clear();
- pipUiReload.clear();
- groupUiReload.clear();
+ req.allUIReload = base_->allUiReload;
+ req.frameUIReload = base_->frameUiReload;
+ req.belUIReload = base_->belUiReload;
+ req.wireUIReload = base_->wireUiReload;
+ req.pipUIReload = base_->pipUiReload;
+ req.groupUIReload = base_->groupUiReload;
+
+ base_->allUiReload = false;
+ base_->frameUiReload = false;
+ base_->belUiReload.clear();
+ base_->wireUiReload.clear();
+ base_->pipUiReload.clear();
+ base_->groupUiReload.clear();
return req;
}
};
@@ -331,6 +390,53 @@ NEXTPNR_NAMESPACE_END
NEXTPNR_NAMESPACE_BEGIN
+// Read proxy to access ReadMethods while holding lock on underlying BaseCtx.
+class ReadContext : public ArchReadMethods
+{
+ friend class BaseCtx;
+private:
+ boost::shared_mutex *lock_;
+ ReadContext(const Arch *parent) : ArchReadMethods(parent), lock_(&parent->mtx_)
+ {
+ lock_->lock_shared();
+ }
+public:
+ ~ReadContext()
+ {
+ if (lock_ != nullptr) {
+ lock_->unlock_shared();
+ }
+ }
+ ReadContext(ReadContext &&other): ArchReadMethods(other), lock_(other.lock_)
+ {
+ other.lock_ = nullptr;
+ }
+};
+
+// Read proxy to access MutateMethods while holding lock on underlying BaseCtx.
+class MutateContext : public ArchReadMethods, public ArchMutateMethods
+{
+ friend class BaseCtx;
+private:
+ boost::shared_mutex *lock_;
+ MutateContext(Arch *parent) : ArchReadMethods(parent), ArchMutateMethods(parent), lock_(&parent->mtx_)
+ {
+ lock_->lock();
+ }
+public:
+ ~MutateContext()
+ {
+ if (lock_ != nullptr) {
+ lock_->unlock();
+ }
+ }
+ MutateContext(MutateContext &&other): ArchReadMethods(other), ArchMutateMethods(other), lock_(other.lock_)
+ {
+ other.lock_ = nullptr;
+ }
+};
+
+
struct Context : Arch
{
bool verbose = false;