MultiAgentDecisionProcess
Release 0.2.1
Main Page
Namespaces
Classes
Files
File List
File Members
DecPOMDPDiscrete.cpp
Go to the documentation of this file.
1
28
#include "
DecPOMDPDiscrete.h
"
29
#include "
RGet.h
"
30
#include "
RewardModelMapping.h
"
31
#include "
RewardModelMappingSparse.h
"
32
33
using namespace
std;
34
35
#define DEBUG_DPOMDPD 0
36
//Debug Fill DecPOMDP Discrete - functions to initialize the DecPOMDPDiscrete
37
//(typically from a file)
38
#define DEBUG_FILLDPD 0
39
40
/// Construct a DecPOMDPDiscrete with the given name, description and
/// problem-file path; all three are forwarded verbatim to the base class.
DecPOMDPDiscrete::DecPOMDPDiscrete(string name, string descr, string pf) :
    MultiAgentDecisionProcessDiscrete(name, descr, pf)
{
    // No reward model exists yet: it is allocated later by
    // CreateNewRewardModel(), so start from a null pointer and an
    // uninitialized state.
    _m_p_rModel = 0;
    _m_initialized = false;
}
46
47
/// Destructor: releases the reward model owned by this object.
/// Deleting a null pointer is a no-op, so this is safe even when
/// CreateNewRewardModel() was never called.
DecPOMDPDiscrete::~DecPOMDPDiscrete()
{
    if(DEBUG_DPOMDPD)
    {
        cout << "deleting DecPOMDPDiscrete (deleting rewards)" << endl;
    }
    delete _m_p_rModel;
}
53
54
bool
DecPOMDPDiscrete::SetInitialized
(
bool
b)
55
{
56
if
(
MultiAgentDecisionProcessDiscrete::SetInitialized
(
true
) )
57
{
58
_m_initialized
= b;
59
return
(
true
);
60
}
61
else
62
return
(
false
);
63
}
64
65
void
DecPOMDPDiscrete::CreateNewRewardModel
()
66
{
67
if
(
_m_initialized
)
68
delete
(
_m_p_rModel
);
69
70
if
(
GetSparse
())
71
_m_p_rModel
=
new
RewardModelMappingSparse
(
GetNrStates
(),
GetNrJointActions
());
72
else
73
_m_p_rModel
=
new
RewardModelMapping
(
GetNrStates
(),
GetNrJointActions
());
74
}
75
76
/// Return a newly allocated RGet accessor wrapping the reward model.
/// The caller takes ownership of the returned object.
RGet* DecPOMDPDiscrete::GetRGet() const
{
    // NOTE(review): this downcast is only valid when the reward model
    // was created as a (dense) RewardModelMapping; if GetSparse() was
    // true, _m_p_rModel actually points to a RewardModelMappingSparse
    // and this cast is undefined behavior — confirm callers never use
    // GetRGet() with a sparse model. static_cast replaces the original
    // C-style cast so the intent (unchecked downcast) is explicit.
    return new RGet_RewardModelMapping(
        static_cast<RewardModelMapping*>(_m_p_rModel));
}
81
82
/// Render a human-readable description of this DecPOMDPDiscrete:
/// the base-class descriptions followed by the reward model.
/// Throws E when the reward model has not been initialized.
string DecPOMDPDiscrete::SoftPrint() const
{
    stringstream ss;
    ss << MultiAgentDecisionProcessDiscrete::SoftPrint();
    ss << DecPOMDP::SoftPrint();

    // Guard clause: without an initialized reward model there is
    // nothing sensible to print.
    if(!_m_initialized)
        throw E("DecPOMDPDiscrete components (reward model) not initialized");

    ss << "Reward model: " << endl;
    ss << _m_p_rModel->SoftPrint();

    return ss.str();
}
98
99
/// Fold the successor-state-dependent reward r into the stored reward
/// for (sI, jaI): the expectation P(sucSI | sI, jaI) * r is added to
/// the reward already recorded for that state/joint-action pair.
void DecPOMDPDiscrete::SetReward(Index sI, Index jaI, Index sucSI, double r)
{
    const double expected = GetTransitionProbability(sI, jaI, sucSI) * r;
    SetReward(sI, jaI, GetReward(sI, jaI) + expected);
}
105
106
/// Observation-dependent rewards are not supported by this class;
/// always throws E.
void DecPOMDPDiscrete::SetReward(Index sI, Index jaI, Index sucSI,
                                 Index joI, double r)
{
    throw E("DecPOMDPDiscrete::SetReward(sI,jaI,sucSI,joI,r) not implemented");
}
111
112
void
113
DecPOMDPDiscrete::ExtractMADPDiscrete
(
MultiAgentDecisionProcessDiscrete
*madp)
114
{
115
madp->
SetNrAgents
(
GetNrAgents
());
116
madp->
SetName
(
GetName
());
117
madp->
SetDescription
(
GetDescription
());
118
119
// transition model
120
madp->
SetTransitionModelPtr
(
121
const_cast<TransitionModelDiscrete*>(
GetTransitionModelDiscretePtr
()));
122
123
// observation model
124
madp->
SetObservationModelPtr
(
125
const_cast<ObservationModelDiscrete*>(
GetObservationModelDiscretePtr
()));
126
127
// MADPComponentDiscreteStates
128
for
(
Index
s=0;s!=
GetNrStates
();++s)
129
madp->
AddState
(
GetState
(s)->
GetName
());
130
131
madp->
SetISD
(
GetISD
());
132
133
// MADPComponentDiscreteObservations
134
for
(
Index
id
=0;
id
!=
GetNrAgents
();++id)
135
for
(
Index
o=0;o!=
GetNrObservations
(
id
);++o)
136
madp->
AddObservation
(
id
,
GetObservation
(
id
,o)->
GetName
());
137
madp->
ConstructJointObservations
();
138
139
// MADPComponentDiscreteActions
140
for
(
Index
id
=0;
id
!=
GetNrAgents
();++id)
141
for
(
Index
o=0;o!=
GetNrActions
(
id
);++o)
142
madp->
AddAction
(
id
,
GetAction
(
id
,o)->
GetName
());
143
madp->
ConstructJointActions
();
144
145
madp->
Initialize
();
146
}
src
base
DecPOMDPDiscrete.cpp
Generated on Mon Sep 23 2013 14:50:05 for MultiAgentDecisionProcess by
1.8.1.2