mirror of https://github.com/gsi-upm/soil
synced 2025-09-19 06:32:21 +00:00

Compare commits: 1cf85ea450 ... 0.14.6 (4 commits)

Commits (SHA1):
    c8b8149a17
    6690b6ee5f
    97835b3d10
    b0add8552e
@@ -1,2 +1,4 @@
 **/soil_output
 .*
+__pycache__
+*.pyc
CHANGELOG.md (21 changed lines)

@@ -3,6 +3,27 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.14.6]
+### Fixed
+* Bug with newer versions of networkx (2.4), where the Graph.node attribute has been removed. We have updated our calls, but the code in nxsim is not under our control, so we have pinned the networkx version until that issue is solved.
+### Changed
+* Explicit yaml.SafeLoader to avoid deprecation warnings when using yaml.load. It should not break any existing setups, but we could move to the FullLoader in the future if needed.
+
+## [0.14.4]
+### Fixed
+* Bug in `agent.get_agents()` when `state_id` is passed as a string. The tests have been modified accordingly.
+## [0.14.3]
+### Fixed
+* Incompatibility with py3.3-3.6 due to ModuleNotFoundError and TypeError in DryRunner
+## [0.14.2]
+### Fixed
+* Output path for exporters is now soil_output
+### Changed
+* CSV output to stdout in dry_run mode
+## [0.14.1]
+### Changed
+* Exporter names in lower case
+* Add default exporter in runs
 ## [0.14.0]
 ### Added
 * Loading configuration from template definitions in the yaml, in preparation for SALib support.
 
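The two 0.14.6 entries amount to small API adjustments that show up repeatedly in the hunks below. A minimal, self-contained sketch of both (plain networkx and PyYAML, nothing soil-specific):

import networkx as nx
import yaml

# networkx 2.4 removed Graph.node; Graph.nodes works across the 2.x series.
G = nx.Graph()
G.add_node(0, agent='example-agent')
print(G.nodes[0]['agent'])     # G.node[0]['agent'] raises AttributeError on >= 2.4

# PyYAML 5.1 warns when yaml.load() is called without an explicit Loader.
config = yaml.load('name: example\nmax_time: 10\n', Loader=yaml.SafeLoader)
print(config['max_time'])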
@@ -323,7 +323,7 @@ Let's run our simulation:
 
 .. code:: ipython3
 
-    soil.simulation.run_from_config(config, dump=False)
+    soil.simulation.run_from_config(config)
 
 
 .. parsed-literal::
@@ -59,7 +59,7 @@ class Patron(FSM):
     2) Look for a bar where the agent and other agents in the same group can get in.
     3) While in the bar, patrons only drink, until they get drunk and taken home.
     '''
-    level = logging.INFO
+    level = logging.DEBUG
 
     defaults = {
         'pub': None,

@@ -113,7 +113,8 @@ class Patron(FSM):
     @state
     def at_home(self):
         '''The end'''
-        self.debug('Life sucks. I\'m home!')
+        others = self.get_agents(state_id=Patron.at_home.id, limit_neighbors=True)
+        self.debug('I\'m home. Just like {} of my friends'.format(len(others)))
 
     def drink(self):
         self['pints'] += 1
@@ -1,6 +1,6 @@
 nxsim>=0.1.2
 simpy
-networkx>=2.0
+networkx>=2.0,<2.4
 numpy
 matplotlib
 pyyaml>=5.1
@@ -1 +1 @@
-0.13.8
+0.14.6
@@ -57,11 +57,11 @@ def main():
     logging.info('Loading config file: {}'.format(args.file))
 
     try:
-        exporters = list(args.exporter or [])
+        exporters = list(args.exporter or ['default', ])
         if args.csv:
-            exporters.append('CSV')
+            exporters.append('csv')
         if args.graph:
-            exporters.append('Gexf')
+            exporters.append('gexf')
         exp_params = {}
         if args.dry_run:
             exp_params['copy_to'] = sys.stdout
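The CLI change above is self-contained: when no exporter is requested, the list now starts with 'default', and the optional CSV/GEXF exporters are referred to by their new lower-case names. A runnable restatement of that argument handling, with the parsed arguments faked through SimpleNamespace (the attribute names are the ones used in the hunk):

from types import SimpleNamespace

# Stand-in for argparse's result; only the attributes used above are included.
args = SimpleNamespace(exporter=None, csv=True, graph=False, dry_run=True)

exporters = list(args.exporter or ['default', ])
if args.csv:
    exporters.append('csv')
if args.graph:
    exporters.append('gexf')

print(exporters)   # ['default', 'csv']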
@@ -476,10 +476,7 @@ class Geo(NetworkAgent):
 
 def select(agents, state_id=None, agent_type=None, ignore=None, iterator=False, **kwargs):
 
-    if state_id is not None:
-        try:
-            state_id = tuple(state_id)
-        except TypeError:
-            state_id = tuple([state_id])
+    if state_id is not None and not isinstance(state_id, (tuple, list)):
+        state_id = tuple([state_id])
     if agent_type is not None:
         try:
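This is the fix announced in the 0.14.4 changelog entry: tuple() applied to a string splits it into characters, so a state_id such as 'at_home' could never match anything. A standalone illustration of the old failure mode and the new normalization:

# Old behaviour: tuple() iterates over the string, character by character.
print(tuple('at_home'))        # ('a', 't', '_', 'h', 'o', 'm', 'e')

# New behaviour: only wrap scalars, leave tuples and lists alone.
def normalize(state_id):
    if state_id is not None and not isinstance(state_id, (tuple, list)):
        state_id = tuple([state_id])
    return state_id

print(normalize('at_home'))              # ('at_home',)
print(normalize(('at_home', 'drunk')))   # unchanged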
@@ -20,7 +20,7 @@ def _read_data(pattern, *args, from_csv=False, process_args=None, **kwargs):
         process_args = {}
     for folder in glob.glob(pattern):
         config_file = glob.glob(join(folder, '*.yml'))[0]
-        config = yaml.load(open(config_file))
+        config = yaml.load(open(config_file), Loader=yaml.SafeLoader)
         df = None
         if from_csv:
             for trial_data in sorted(glob.glob(join(folder,
@@ -133,7 +133,7 @@ def get_count(df, *keys):
 def get_value(df, *keys, aggfunc='sum'):
     if keys:
         df = df[list(keys)]
-    return df.groupby(axis=1, level=0).agg(aggfunc, axis=1)
+    return df.groupby(axis=1, level=0).agg(aggfunc)
 
 
 def plot_all(*args, **kwargs):
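get_value aggregates a DataFrame whose columns form a MultiIndex of (key, agent); recent pandas rejects the extra axis argument to agg, so it was dropped. A toy reproduction of the same aggregation (the column layout here is assumed for illustration, not taken from soil's output):

import pandas as pd

# One column per (key, agent) pair, one row per simulation step.
cols = pd.MultiIndex.from_tuples([('pints', 'a0'), ('pints', 'a1'), ('drunk', 'a0')])
df = pd.DataFrame([[1, 0, 0], [2, 3, 1]], columns=cols)

# Same call as the patched get_value: group columns by their first level.
# (pandas 2.x deprecates axis=1 in groupby; df.T.groupby(level=0).sum().T is the
# forward-compatible spelling of the same operation.)
print(df.groupby(axis=1, level=0).agg('sum'))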
@@ -87,7 +87,7 @@ class Environment(nxsim.NetworkEnvironment):
     @property
     def network_agents(self):
         for i in self.G.nodes():
-            node = self.G.node[i]
+            node = self.G.nodes[i]
             if 'agent' in node:
                 yield node['agent']
 
@@ -212,12 +212,12 @@ class Environment(nxsim.NetworkEnvironment):
         return self[key] if key in self else default
 
     def get_agent(self, agent_id):
-        return self.G.node[agent_id]['agent']
+        return self.G.nodes[agent_id]['agent']
 
     def get_agents(self, nodes=None):
         if nodes is None:
             return list(self.agents)
-        return [self.G.node[i]['agent'] for i in nodes]
+        return [self.G.nodes[i]['agent'] for i in nodes]
 
     def dump_csv(self, f):
         with utils.open_or_reuse(f, 'w') as f:
@@ -231,9 +231,9 @@ class Environment(nxsim.NetworkEnvironment):
         # Workaround for geometric models
         # See soil/soil#4
         for node in G.nodes():
-            if 'pos' in G.node[node]:
-                G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
-                del (G.node[node]['pos'])
+            if 'pos' in G.nodes[node]:
+                G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
+                del (G.nodes[node]['pos'])
 
         nx.write_gexf(G, f, version="1.2draft")
 
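The 'viz' workaround (see soil/soil#4) exists because the raw 'pos' coordinates set by networkx's geometric generators are not a GEXF-serializable attribute, whereas positions stored under 'viz' are written out as proper GEXF visualization data. The same conversion outside of soil, as a small sketch:

import io
import networkx as nx

G = nx.random_geometric_graph(5, 0.5, seed=1)   # nodes carry a 'pos' coordinate pair

for node in G.nodes():
    if 'pos' in G.nodes[node]:
        x, y = G.nodes[node]['pos']
        G.nodes[node]['viz'] = {"position": {"x": x, "y": y, "z": 0.0}}
        del G.nodes[node]['pos']                # drop the attribute GEXF cannot store

buf = io.BytesIO()
nx.write_gexf(G, buf, version="1.2draft")
print(buf.getvalue()[:60])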
@@ -50,7 +50,7 @@ class Exporter:
 
     def __init__(self, simulation, outdir=None, dry_run=None, copy_to=None):
         self.sim = simulation
-        outdir = outdir or os.getcwd()
+        outdir = outdir or os.path.join(os.getcwd(), 'soil_output')
         self.outdir = os.path.join(outdir,
                                    simulation.group or '',
                                    simulation.name)
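With this default, results land under a soil_output folder inside the working directory (matching the **/soil_output ignore rule near the top of this comparison) instead of cluttering the current directory. The resulting layout, computed the same way the constructor does, with example values for the group and simulation name:

import os

outdir = os.path.join(os.getcwd(), 'soil_output')   # new default
group = ''                                          # simulation.group may be empty
name = 'exporter_sim'                               # example simulation name

print(os.path.join(outdir, group or '', name))
# -> <cwd>/soil_output/exporter_sim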
@@ -78,8 +78,8 @@ class Exporter:
         return open_or_reuse(f, mode=mode, **kwargs)
 
 
-class Default(Exporter):
-    '''Default exporter. Writes CSV and sqlite results, as well as the simulation YAML'''
+class default(Exporter):
+    '''Default exporter. Writes sqlite results, as well as the simulation YAML'''
 
     def start(self):
         if not self.dry_run:
@@ -96,25 +96,29 @@ class Default(Exporter):
             env.dump_sqlite(f)
 
 
-class CSV(Exporter):
-    def trial_end(self, env):
-        if not self.dry_run:
-            with timer('[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
-                                                                     env.name)):
+class csv(Exporter):
+    '''Export the state of each environment (and its agents) in a separate CSV file'''
+    def trial_end(self, env):
+        with timer('[CSV] Dumping simulation {} trial {} @ dir {}'.format(self.sim.name,
+                                                                          env.name,
+                                                                          self.outdir)):
             with self.output('{}.csv'.format(env.name)) as f:
                 env.dump_csv(f)
 
 
-class Gexf(Exporter):
+class gexf(Exporter):
     def trial_end(self, env):
-        if not self.dry_run:
-            with timer('[CSV] Dumping simulation {} trial {}'.format(self.sim.name,
+        if self.dry_run:
+            logger.info('Not dumping GEXF in dry_run mode')
+            return
+
+        with timer('[GEXF] Dumping simulation {} trial {}'.format(self.sim.name,
                                                                   env.name)):
             with self.output('{}.gexf'.format(env.name), mode='wb') as f:
                 env.dump_gexf(f)
 
 
-class Dummy(Exporter):
+class dummy(Exporter):
 
     def start(self):
         with self.output('dummy', 'w') as f:

@@ -131,7 +135,7 @@ class Dummy(Exporter):
             f.write('simulation ended @ {}\n'.format(time.time()))
 
 
-class Distribution(Exporter):
+class distribution(Exporter):
     '''
     Write the distribution of agent states at the end of each trial,
     the mean value, and its deviation.

@@ -165,7 +169,7 @@ class Distribution(Exporter):
         with self.output('metrics.csv') as f:
             dfm.to_csv(f)
 
-class GraphDrawing(Exporter):
+class graphdrawing(Exporter):
 
     def trial_end(self, env):
         # Outside effects
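Taken together, the renames keep the hook-based interface (start, trial_end, self.output, self.dry_run) but let each exporter decide how to behave in dry-run mode, as gexf now does. A rough sketch of a custom exporter following that pattern; anything not visible in the hunks above (such as importing Exporter from soil.exporters) is an assumption:

from soil import exporters


class summary(exporters.Exporter):
    '''Write one line per trial with the number of agents (illustrative only).'''

    def trial_end(self, env):
        if self.dry_run:
            return                                   # skip disk output, like gexf above
        with self.output('{}.summary.txt'.format(env.name)) as f:
            f.write('{}: {} agents\n'.format(env.name, len(list(env.agents))))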
@@ -11,6 +11,7 @@ logger = logging.getLogger(__name__)
 from collections import UserDict, namedtuple
 
 from . import serialization
+from .utils import open_or_reuse
 
 
 class History:

@@ -236,7 +237,7 @@ class History:
 
     def dump(self, f):
         self._close()
-        for line in open(self.db_path, 'rb'):
+        for line in open_or_reuse(self.db_path, 'rb'):
             f.write(line)
 
 
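History.dump now goes through open_or_reuse, which (per the utils change further down) opens a path but returns the argument untouched when it is already a file-like object. A tiny stand-in that shows the contract:

import io

def open_or_reuse(f, *args, **kwargs):
    '''Simplified stand-in for soil.utils.open_or_reuse.'''
    try:
        return open(f, *args, **kwargs)      # the real version calls safe_open
    except (AttributeError, TypeError):
        return f                             # f was already an open file object

already_open = io.BytesIO(b'raw sqlite bytes')
assert open_or_reuse(already_open, 'rb') is already_open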
|
@@ -186,7 +186,7 @@ def deserializer(type_, known_modules=[]):
|
|||||||
module = importlib.import_module(modname)
|
module = importlib.import_module(modname)
|
||||||
cls = getattr(module, tname)
|
cls = getattr(module, tname)
|
||||||
return getattr(cls, 'deserialize', cls)
|
return getattr(cls, 'deserialize', cls)
|
||||||
except (ModuleNotFoundError, AttributeError) as ex:
|
except (ImportError, AttributeError) as ex:
|
||||||
errors.append((modname, tname, ex))
|
errors.append((modname, tname, ex))
|
||||||
raise Exception('Could not find type {}. Tried: {}'.format(type_, errors))
|
raise Exception('Could not find type {}. Tried: {}'.format(type_, errors))
|
||||||
|
|
||||||
|
@@ -153,11 +153,11 @@ class Simulation(NetworkSimulation):
                            **kwargs)
 
     def _run_simulation_gen(self, *args, parallel=False, dry_run=False,
-                            exporters=None, outdir=None, exporter_params={}, **kwargs):
+                            exporters=['default', ], outdir=None, exporter_params={}, **kwargs):
         logger.info('Using exporters: %s', exporters or [])
         logger.info('Output directory: %s', outdir)
         exporters = exporters_for_sim(self,
-                                      exporters or [],
+                                      exporters,
                                       dry_run=dry_run,
                                       outdir=outdir,
                                       **exporter_params)
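Because the fallback is now ['default'], a plain run produces the default exporter's output without any extra configuration. A usage sketch based on the calls that appear in the tests below; 'my_config.yml' is a placeholder path:

import yaml
from soil import simulation

with open('my_config.yml') as f:                    # placeholder config file
    config = yaml.load(f, Loader=yaml.SafeLoader)

s = simulation.from_config(config)

# No exporters argument: the new default is ['default'], so results are written
# under soil_output/ unless dry_run=True suppresses disk output.
envs = s.run_simulation(dry_run=True)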
@@ -2,6 +2,8 @@ import logging
 import time
 import os
 
+from shutil import copyfile
+
 from contextlib import contextmanager
 
 logger = logging.getLogger('soil')

@@ -23,15 +25,26 @@ def timer(name='task', pre="", function=logger.info, to_object=None):
         to_object.end = end
 
 
-def safe_open(path, *args, **kwargs):
+def safe_open(path, mode='r', backup=True, **kwargs):
     outdir = os.path.dirname(path)
     if outdir and not os.path.exists(outdir):
         os.makedirs(outdir)
-    return open(path, *args, **kwargs)
+    if backup and 'w' in mode and os.path.exists(path):
+        creation = os.path.getctime(path)
+        stamp = time.strftime('%Y-%m-%d_%H:%M', time.localtime(creation))
+
+        backup_dir = os.path.join(outdir, stamp)
+        if not os.path.exists(backup_dir):
+            os.makedirs(backup_dir)
+        newpath = os.path.join(backup_dir, os.path.basename(path))
+        if os.path.exists(newpath):
+            newpath = '{}@{}'.format(newpath, time.time())
+        copyfile(path, newpath)
+    return open(path, mode=mode, **kwargs)
 
 
 def open_or_reuse(f, *args, **kwargs):
     try:
         return safe_open(f, *args, **kwargs)
-    except TypeError:
+    except (AttributeError, TypeError):
         return f
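The practical effect of the new safe_open is that re-running a simulation no longer overwrites earlier results: if a file opened for writing already exists, the previous copy is first moved into a subdirectory named after its creation time. A usage sketch with illustrative paths:

from soil.utils import safe_open

path = 'soil_output/example_sim/stats.csv'

with safe_open(path, mode='w') as f:    # first run: directories are created as needed
    f.write('step,count\n')

with safe_open(path, mode='w') as f:    # second run: the old stats.csv is copied to
    f.write('step,count\n')             # soil_output/example_sim/<YYYY-mm-dd_HH:MM>/stats.csv first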
@@ -118,9 +118,9 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
         elif msg['type'] == 'download_gexf':
             G = self.trials[ int(msg['data']) ].history_to_graph()
             for node in G.nodes():
-                if 'pos' in G.node[node]:
-                    G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
-                    del (G.node[node]['pos'])
+                if 'pos' in G.nodes[node]:
+                    G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
+                    del (G.nodes[node]['pos'])
             writer = nx.readwrite.gexf.GEXFWriter(version='1.2draft')
             writer.add_graph(G)
             self.write_message({'type': 'download_gexf',

@@ -130,9 +130,9 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
         elif msg['type'] == 'download_json':
             G = self.trials[ int(msg['data']) ].history_to_graph()
             for node in G.nodes():
-                if 'pos' in G.node[node]:
-                    G.node[node]['viz'] = {"position": {"x": G.node[node]['pos'][0], "y": G.node[node]['pos'][1], "z": 0.0}}
-                    del (G.node[node]['pos'])
+                if 'pos' in G.nodes[node]:
+                    G.nodes[node]['viz'] = {"position": {"x": G.nodes[node]['pos'][0], "y": G.nodes[node]['pos'][1], "z": 0.0}}
+                    del (G.nodes[node]['pos'])
             self.write_message({'type': 'download_json',
                                 'filename': self.config['name'] + '_trial_' + str(msg['data']),
                                 'data': nx.node_link_data(G) })
@@ -60,7 +60,7 @@ class Exporters(TestCase):
         }
         output = io.StringIO()
         s = simulation.from_config(config)
-        s.run_simulation(exporters=[exporters.Distribution], dry_run=True, exporter_params={'copy_to': output})
+        s.run_simulation(exporters=[exporters.distribution], dry_run=True, exporter_params={'copy_to': output})
         result = output.getvalue()
         assert 'count' in result
         assert 'SEED,Noneexporter_sim_trial_3,1,,1,1,1,1' in result

@@ -83,10 +83,10 @@ class Exporters(TestCase):
         s = simulation.from_config(config)
         tmpdir = tempfile.mkdtemp()
         envs = s.run_simulation(exporters=[
-            exporters.Default,
-            exporters.CSV,
-            exporters.Gexf,
-            exporters.Distribution,
+            exporters.default,
+            exporters.csv,
+            exporters.gexf,
+            exporters.distribution,
         ],
             outdir=tmpdir,
             exporter_params={'copy_to': output})
@@ -16,10 +16,15 @@ ROOT = os.path.abspath(os.path.dirname(__file__))
 EXAMPLES = join(ROOT, '..', 'examples')
 
 
-class CustomAgent(agents.BaseAgent):
-    def step(self):
-        self.state['neighbors'] = self.count_agents(state_id=0,
+class CustomAgent(agents.FSM):
+    @agents.default_state
+    @agents.state
+    def normal(self):
+        self.state['neighbors'] = self.count_agents(state_id='normal',
                                                     limit_neighbors=True)
+    @agents.state
+    def unreachable(self):
+        return
 
 class TestMain(TestCase):
 
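The rewritten CustomAgent doubles as a compact template for FSM agents: stack @agents.default_state and @agents.state on one method to make it the initial state, add further @agents.state methods for the remaining states, and query the surrounding network with count_agents filtered by state_id. The same class again, with explanatory comments (the import path is assumed):

from soil import agents


class CustomAgent(agents.FSM):
    @agents.default_state            # the agent starts in this state
    @agents.state
    def normal(self):
        # Count how many neighbours are also in the 'normal' state.
        self.state['neighbors'] = self.count_agents(state_id='normal',
                                                    limit_neighbors=True)

    @agents.state
    def unreachable(self):
        # A state that does nothing; no other state transitions into it.
        return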
@@ -134,8 +139,7 @@ class TestMain(TestCase):
             },
             'network_agents': [{
                 'agent_type': CustomAgent,
-                'weight': 1,
-                'state': {'id': 0}
+                'weight': 1
 
             }],
             'max_time': 10,

@@ -145,6 +149,9 @@ class TestMain(TestCase):
         s = simulation.from_config(config)
         env = s.run_simulation(dry_run=True)[0]
         assert env.get_agent(0).state['neighbors'] == 1
+        assert env.get_agent(0).state['neighbors'] == 1
+        assert env.get_agent(1).count_agents(state_id='normal') == 2
+        assert env.get_agent(1).count_agents(state_id='normal', limit_neighbors=True) == 1
 
     def test_torvalds_example(self):
         """A complete example from a documentation should work."""

@@ -179,7 +186,7 @@ class TestMain(TestCase):
         with utils.timer('serializing'):
             serial = s.to_yaml()
         with utils.timer('recovering'):
-            recovered = yaml.load(serial)
+            recovered = yaml.load(serial, Loader=yaml.SafeLoader)
         with utils.timer('deleting'):
             del recovered['topology']
         assert config == recovered

@@ -233,7 +240,7 @@ class TestMain(TestCase):
         env[0, 0, 'testvalue'] = 'start'
         env[0, 10, 'testvalue'] = 'finish'
         nG = env.history_to_graph()
-        values = nG.node[0]['attr_testvalue']
+        values = nG.nodes[0]['attr_testvalue']
         assert ('start', 0, 10) in values
         assert ('finish', 10, None) in values
 