diff --git a/.coveragerc b/.coveragerc index a9a72c98..50f806cf 100644 --- a/.coveragerc +++ b/.coveragerc @@ -3,3 +3,6 @@ omit = */tests/* micropsi_server/bottle.py */conftest.py + micropsi_server/minidoc.py + */_test_* + */_demo_* diff --git a/.gitignore b/.gitignore index da83dc01..88c9fd6c 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,6 @@ micropsi.log /test-data/ pip-selfcheck.json +/cherrypy +/.pycharmvenv/ +/.cache/ diff --git a/.idea/dictionaries/joscha.xml b/.idea/dictionaries/joscha.xml deleted file mode 100644 index a40a29ef..00000000 --- a/.idea/dictionaries/joscha.xml +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 61248979..150e6a66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,36 @@ +0.12-alpha10 (2017-09-04) +========== + * Improved user prompts + * Inf/NaN guard for flowmodules + * Realtime world support + * Reliable code reloading + + +0.11-alpha9 (2017-06-26) +========== + * Add adhoc-monitors for plotting simple scalar values + * Node states now support numpy data structrues + * Add autosave-functionality for nodenets + * Change the stepping order from `nodenet, world, sleep` to `nodenet, sleep, world` + + +0.10-alpha8 (2017-06-06) +========== + * use matplotlib's webagg for plotting + + +0.9-alpha7 (2017-02-13) +========== + * New structure for native module / recipe discovery + * Live reloading of World and worldadapter code + * Outsourced world and worldadapter definitions + * Flowmodules for theano_engine + * Recorders for theano_engine + * High-dimensional native modules for theano_engine + * Configurable Worldadapters + + 0.8-alpha6 (2016-04-22) ========== @@ -41,20 +73,20 @@ 0.4-alpha2 (2015-06-05) ========== -* Introduced Comment nodes -* Introduced global modulators and Doernerian emotional model -* Introduced por/ret link decay -* Introduced recipes: python scripts that can be run from the nodenet editor and have netapi access -* Copy & paste functionality 
in nodenet editor -* Snap to grid in nodenet editor -* Nodenet editor setting for rendering links: always, for currently selected nodes, or never -* Nodenet editor can link multiple selected nodes at once -* Improved nodenet editor user experience when zoomed out -* Additional monitor types, including link weight monitors -* Display origin-gate & target-slot in link-sidebar - -* Introduced theano_engine, an engine for running nodenets on top of numpy/theano (requires additional configuration) -* Introduced Minecraft connectivity (requires additional configuration) + * Introduced Comment nodes + * Introduced global modulators and Doernerian emotional model + * Introduced por/ret link decay + * Introduced recipes: python scripts that can be run from the nodenet editor and have netapi access + * Copy & paste functionality in nodenet editor + * Snap to grid in nodenet editor + * Nodenet editor setting for rendering links: always, for currently selected nodes, or never + * Nodenet editor can link multiple selected nodes at once + * Improved nodenet editor user experience when zoomed out + * Additional monitor types, including link weight monitors + * Display origin-gate & target-slot in link-sidebar + + * Introduced theano_engine, an engine for running nodenets on top of numpy/theano (requires additional configuration) + * Introduced Minecraft connectivity (requires additional configuration) 0.3-alpha1 (2014-06-30) diff --git a/Makefile b/Makefile index ec457e7e..e7d87edb 100644 --- a/Makefile +++ b/Makefile @@ -29,5 +29,7 @@ test-toolkit: test-agents: bin/py.test --agents +vrep: + bin/pip install -e git+git@github.com:micropsi-industries/vrep-interface.git#egg=vrep-interface-dev .PHONY: run diff --git a/README.md b/README.md index f43b21c4..768fb833 100644 --- a/README.md +++ b/README.md @@ -1,34 +1,43 @@ About ----- -An artificial intelligence toolkit implementing concepts from the cognitive architecture MicroPsi. 
+A Python implementation of the cognitive architecture MicroPsi. -For more information on the cognitive architecture visit [micropsi.com](http://www.micropsi.com); e.g., the [publications ](http://www.micropsi.com/publications/publications.html) page. +For more information visit [cognitive-ai.com](http://cognitive-ai.com), for instance the [publications ](http://cognitive-ai.com/publications/publications.html) page. For a one-paper introduction see [The AEP Toolkit for Agent Design and Simulation](http://cognitive-ai.com/publications/assets/BachVuineMates2003.pdf). +You can also take a look at the [BDK Documentation](http://www.micropsi-industries.com/documentation/introduction) Prerequisites ----- -* Python3 (tested with 3.4.3 and 3.5.1) -* On Windows, we recommend downloading and installing [WinPython 3.4.3.7](http://winpython.github.io/) + * Python3 (tested with 3.4.3 and 3.5.1) + * On Windows, we recommend downloading and installing [WinPython 3.4.3.7](http://winpython.github.io/) Run on OS X or Linux: ----- -* Run `./run.sh` -* View in browser at [http://localhost:6543/](http://localhost:6543/) + * Run `./run.sh` + * View in browser at [http://localhost:6543/](http://localhost:6543/) Run on Windows: ----- -* Add the winpython folders `python-3.4.3` and `python-3.4.3\Scripts` to your PATH environment variable -* On the Windows command-line, "cd" to the microps2 folder and run `python start_micropsi_server.py` -* View in browser at [http://localhost:6543/](http://localhost:6543/) + * Add the winpython folders `python-3.4.3` and `python-3.4.3\Scripts` to your PATH environment variable + * On the Windows command-line, "cd" to the microps2 folder and run `python start_micropsi_server.py` + * View in browser at [http://localhost:6543/](http://localhost:6543/) + + +Installing environments +----- + * [Download a zip-file of the Island World](http://micropsi.industries/tech/island_world.zip) + * Unzip in your `micropsi_code` folder + * Restart micropsi, or click 
`Reload code` + * You can find further environments on the [micropsi industries download page](http://www.micropsi-industries.com/download) Attribution ----- [micropsi2](https://github.com/joschabach/micropsi2) uses -* [bottle](https://github.com/defnull/bottle) -* [spock](https://github.com/nickelpro/spock) -* [paperjs](http://github.com/paperjs/paper.js) -* [theano](https://github.com/Theano/Theano) + * [bottle](https://github.com/defnull/bottle) + * [spock](https://github.com/nickelpro/spock) + * [paperjs](http://github.com/paperjs/paper.js) + * [theano](https://github.com/Theano/Theano) diff --git a/appdirs.py b/appdirs.py new file mode 100644 index 00000000..0c588378 --- /dev/null +++ b/appdirs.py @@ -0,0 +1,803 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# pylint: disable=C0103 +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# https://msdn.microsoft.com/en-us/library/windows/desktop/dd378457(v=vs.85).aspx +# - Mac OS X: +# https://developer.apple.com/library/content/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 1) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. 
+ # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support// + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + if roaming: + path = os.getenv('APPDATA', _get_win_folder_from_knownid('{3EB685DB-65F9-4CF6-A03A-E3EF65729F3D}')) + else: + path = os.getenv('LOCALAPPDATA', _get_win_folder_from_knownid('{F1B32785-6FBA-4FCF-9D55-7B8E7F157091}')) + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, str(version)) + return path + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/', + if XDG_DATA_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Application Support// + Unix: /usr/local/share/ or /usr/share/ + Vista: (Fail! 
"C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.getenv('ALLUSERSPROFILE', _get_win_folder_from_knownid('{62AB5D82-FDC1-4DC3-A9DD-070D1D495D97}')) + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, str(version)) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, str(version)) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. 
If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical site data directories are: + Mac OS X: ~/Library/Preferences// + Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/". + """ + if system == "win32": + path = user_data_dir(appname, appauthor, None, roaming) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Preferences/') + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, str(version)) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. 
By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Preferences// + Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == 'win32': + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, str(version)) + elif system == 'darwin': + path = os.path.expanduser('/Library/Preferences') + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if version: + path = os.path.join(path, str(version)) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, str(version)) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. 
You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches// + Unix: ~/.cache/ (XDG default) + Vista: C:\Users\\AppData\Local\\\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.getenv('LOCALAPPDATA', _get_win_folder_from_knownid('{F1B32785-6FBA-4FCF-9D55-7B8E7F157091}')) + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, str(version)) + return path + +def user_state_dir(appname=None, appauthor=None, version=None): + r"""Return full path to the user-specific state dir for this application. + "appname" is the name of application. + If None, just the system directory is returned. 
+ "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + Typical user state directories are: + Mac OS X: same as user_data_dir + Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined + Win *: same as user_data_dir + For Unix, we follow this Debian proposal + to extend the XDG spec and support $XDG_STATE_HOME. + That means, by default "~/.local/state/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, version=None, roaming=False) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, str(version)) + return path + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. 
See discussion below. + + Typical user log directories are: + Mac OS X: ~/Library/Logs// + Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined + Vista: C:\Users\\AppData\Local\\\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. + """ + if system == "darwin": + path = os.path.expanduser('~/Library/Logs') + if appname: + if appauthor: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, str(version)) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, str(version)) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, str(version)) + return path + +def user_desktop_dir(): + r"""Return full path to the user's desktop directory. + + Typical user desktop directories are: + Mac OS X: ~/Desktop + Unix: ~/Desktop # or under $XDG_DESKTOP_DIR if defined + Windows: C:\Users\\Desktop + + Returns + ------- + path : str + Returns a full path to the directory. + """ + if system == "darwin": + path = os.path.expanduser('~/Desktop') + elif system == "win32": + path = _get_win_folder_from_knownid('{B4BFCC3A-DB2C-424C-B029-7FE99A87C641}') + else: + path = os.getenv('XDG_DESKTOP_DIR', xdg_user_dirs['XDG_DESKTOP_DIR']) + return path + +def user_documents_dir(appname=None, appauthor=None, version=None): + r"""Return full path to the user's documents directory. 
+ + Typical user documents directories are: + Mac OS X: ~/Documents + Unix: ~/Documents # or under $XDG_DOCUMENTS_DIR if defined + Windows: C:\Users\\Documents + + Params + ------ + appname : str, optional + The name of the application, if None just the documents directory + is returned. + appauthor : str, optional + The name of the appauthor or distributing body. Not used on Linux. + version : str, optional + The version of the application. + Returns + ------- + path : str + Returns a full path to the directory. + """ + if system == "darwin": + path = os.path.expanduser('~/Documents') + elif system == "win32": + path = _get_win_folder_from_knownid('{FDD39AD0-238F-46AF-ADB4-6C85480369C7}') + else: + path = os.getenv('XDG_DOCUMENTS_DIR', xdg_user_dirs['XDG_DOCUMENTS_DIR']) + return path + +def user_download_dir(): + r"""Return full path to the user's downloads directory. + + Typical user desktop directories are: + Mac OS X: ~/Downloads + Unix: ~/Downloads # or under $XDG_DOWNLOAD_DIR if defined + Windows: C:\Users\\Downloads + + Returns + ------- + path : str + Returns a full path to the directory. + """ + if system == "darwin": + path = os.path.expanduser('~/Downloads') + elif system == "win32": + path = _get_win_folder_from_knownid('{374DE290-123F-4565-9164-39C4925E467B}') + else: + path = os.getenv('XDG_DOWNLOAD_DIR', xdg_user_dirs['XDG_DOWNLOAD_DIR']) + return path + +def user_music_dir(): + r"""Return full path to the user's music directory. + + Typical user desktop directories are: + Mac OS X: ~/Music + Unix: ~/Music # or under $XDG_MUSIC_DIR if defined + Windows: C:\Users\\Music + + Returns + ------- + path : str + Returns a full path to the directory. 
+ """ + if system == "darwin": + path = os.path.expanduser('~/Music') + elif system == "win32": + path = _get_win_folder_from_knownid('{4BD8D571-6D19-48D3-BE97-422220080E43}') + else: + path = os.getenv('XDG_MUSIC_DIR', xdg_user_dirs['XDG_MUSIC_DIR']) + return path + +def user_pictures_dir(): + r"""Return full path to the user's pictures directory. + + Typical user desktop directories are: + Mac OS X: ~/Pictures + Unix: ~/Pictures # or under $XDG_PICTURES_DIR if defined + Windows: C:\Users\\Pictures + + Returns + ------- + path : str + Returns a full path to the directory. + """ + if system == "darwin": + path = os.path.expanduser('~/Pictures') + elif system == "win32": + path = _get_win_folder_from_knownid('{33E28130-4E1E-4676-835A-98395C3BC3BB}') + else: + path = os.getenv('XDG_PICTURES_DIR', xdg_user_dirs['XDG_PICTURES_DIR']) + return path + +def user_publicshare_dir(): + r"""Return full path to the user's public directory. + + Typical user desktop directories are: + Mac OS X: ~/Public + Unix: ~/Public # or under $XDG_PUBLICSHARE_DIR if defined + Windows: C:\Users\Public + + .. note:: Not the same sort directory on Linux/OS X and Windows, + On Windows it's a seperate user, on OSX/Linux it's a + directory in the home folder. + + Returns + ------- + path : str + Returns a full path to the directory. + """ + if system == "darwin": + path = os.path.expanduser('~/Public') + elif system == "win32": + path = _get_win_folder_from_knownid('{DFDF76A2-C82A-4D63-906A-5644AC457385}') + else: + path = os.getenv('XDG_PUBLICSHARE_DIR', xdg_user_dirs['XDG_PUBLICSHARE_DIR']) + return path + +def user_templates_dir(appname=None, appauthor=None, version=None): + r"""Return full path to the user's template directory. + + Typical user desktop directories are: + Mac OS X: ~/Desktop + Unix: ~/Templates # or under $XDG_TEMPLATE_DIR if defined + Windows: C:\Users\\Desktop + + .. 
note:: Not the same sort directory on Windows/Linux and OS X, + On Mac OS X these templates can only be used by the application that + created them. On Windows/Linux they are added to the context menu. + + Params + ------ + appname : str, optional + The name of the application. If None, just the system directory + is returned. Only used on Mac OS X. + appauthor : str, optional + The name of the appauthor or distributing body for this application. Typically + it is the owning company name. + version : str or int, optional + An optional version path element to append to the path. You might + want to use this if you want multiple versions of your app to be able + to run independently. If used, this would typically be ".". + Only used on Mac OS X. + + Returns + ------- + path : str + Returns a full path to the directory. + + Notes + ----- + The Mac OS X implementation is based on Microsoft Office' template directory [2]_ + + References + ---------- + .. [2] https://support.office.com/en-us/article/Create-and-use-your-own-template-a1b72758-61a0-4215-80eb-165c6c4bed04 + """ + if system == "darwin": + path = os.path.join(user_data_dir(appname=appname, appauthor=appauthor, version=version), 'User Templates') + elif system == "win32": + path = _get_win_folder_from_knownid('{A63293E8-664E-48DB-A079-DF759E0509F7}') + else: + path = os.getenv('XDG_TEMPLATES_DIR', xdg_user_dirs['XDG_TEMPLATES_DIR']) + return path + +def user_videos_dir(): + r"""Return full path to the user's videos directory. + + Typical user desktop directories are: + Mac OS X: ~/Desktop + Unix: ~/Desktop # or under $XDG_DESKTOP_DIR if defined + Windows: C:\Users\\Desktop + + Returns + ------- + path : str + Returns a full path to the directory. 
+ """ + if system == "darwin": + path = os.path.expanduser('~/Videos') + elif system == "win32": + path = _get_win_folder_from_knownid('{18989B1D-99B5-455B-841C-AB7C74E4DDFC}') + else: + path = os.getenv('XDG_VIDEOS_DIR', xdg_user_dirs['XDG_VIDEOS_DIR']) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname=None, appauthor=None, version=None, + roaming=False, multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + """Return full path to the user-specific data dir for this application.""" + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + """Return full path to the user-shared data dir for this application.""" + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + """Return full path to the user-specific config dir for this application.""" + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + """Return full path to the user-shared data dir for this application.""" + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + """Return full path to the user-specific cache dir for this application.""" + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_state_dir(self): + """Return full path to the user-specific state dir for this application.""" + return user_state_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + """Return full path to the user-specific log dir for this application.""" + return 
user_log_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_desktop_dir(self): + """Return full path to the user's desktop dir.""" + return user_desktop_dir() + + @property + def user_documents_dir(self): + """Return full path to the user's documents dir.""" + return user_documents_dir() + + @property + def user_download_dir(self): + """Return full path to the user's download dir.""" + return user_download_dir() + + @property + def user_music_dir(self): + """Return full path to the user's music dir.""" + return user_music_dir() + + @property + def user_pictures_dir(self): + """Return full path to the user's pictures dir.""" + return user_pictures_dir() + + @property + def user_publicshare_dir(self): + """Return full path to the user's public dir.""" + return user_publicshare_dir() + + @property + def user_templates_dir(self): + """Return full path to the user's templates dir.""" + return user_templates_dir(self.appname, self.appauthor, self.version) + + @property + def user_videos_dir(self): + """Return full path to the user's videos dir.""" + return user_videos_dir() + +#---- Internal support stuff +def _get_win_folder_from_knownid(folderid, userhandle=0): + """Get folder path from KNOWNFOLDERID. + + Based of code by mkropat [https://gist.github.com/mkropat/7550097] licensed under MIT. 
+ + Params + ------ + userhandle: + 0 for current user, -1 for common/shared folder + folderid: + A GUID listed at [https://msdn.microsoft.com/en-us/library/windows/desktop/dd378457.aspx] + """ + import ctypes + from ctypes import windll, wintypes + from uuid import UUID + + + class GUID(ctypes.Structure): + _fields_ = [ + ("Data1", wintypes.DWORD), + ("Data2", wintypes.WORD), + ("Data3", wintypes.WORD), + ("Data4", wintypes.BYTE * 8) + ] + + def __init__(self, uuid_): + ctypes.Structure.__init__(self) + self.Data1, self.Data2, self.Data3, self.Data4[0], self.Data4[1], rest = uuid_.fields + for i in range(2, 8): + self.Data4[i] = rest>>(8 - i - 1)*8 & 0xff + + _CoTaskMemFree = windll.ole32.CoTaskMemFree + _CoTaskMemFree.restype = None + _CoTaskMemFree.argtypes = [ctypes.c_void_p] + + _SHGetKnownFolderPath = windll.shell32.SHGetKnownFolderPath + _SHGetKnownFolderPath.argtypes = [ + ctypes.POINTER(GUID), wintypes.DWORD, wintypes.HANDLE, ctypes.POINTER(ctypes.c_wchar_p) + ] + + fid = GUID(UUID(folderid)) + pPath = ctypes.c_wchar_p() + if _SHGetKnownFolderPath(ctypes.byref(fid), 0, userhandle, ctypes.byref(pPath)) != 0: + raise WindowsError("Path not found for folderid: %s and userhandle: %s" % (folderid, userhandle)) + path = pPath.value + _CoTaskMemFree(pPath) + return path + +if system.startswith('linux'): # For reading from user-dirs.dirs file + xdg_user_dirs = { + "XDG_DESKTOP_DIR": os.path.expanduser("~/Desktop"), + "XDG_DOCUMENTS_DIR": os.path.expanduser("~/Documents"), + "XDG_DOWNLOAD_DIR": os.path.expanduser("~/Downloads"), + "XDG_MUSIC_DIR": os.path.expanduser("~/Music"), + "XDG_PICTURES_DIR": os.path.expanduser("~/Pictures"), + "XDG_PUBLICSHARE_DIR": os.path.expanduser("~/Public"), + "XDG_TEMPLATES_DIR": os.path.expanduser("~/Templates"), + "XDG_VIDEOS_DIR": os.path.expanduser("~/Videos") + } + try: + with open(os.path.join(user_config_dir(), 'user-dirs.dirs')) as f: + for shvar in f.readlines(): + if shvar.startswith('#'): # Skip comments + continue + shvar 
= shvar.rstrip() # Remove newlines + key = shvar.split('=')[0] + value = shvar.split('=')[1].strip("\"'") + while '$' in value: + start = value.find('$') + a = value.find('/', start) + b = value.find('\\', start) + if a > b: end = a + if a < b: end = b + if a == b: end = len(value) + try: + value = value[:start] + os.environ[value[start+1:end]] + value[end:] + except KeyError: + continue + xdg_user_dirs[key] = value + f.close() + except IOError: + pass diff --git a/config.default.ini b/config.default.ini index e5289296..13773886 100644 --- a/config.default.ini +++ b/config.default.ini @@ -10,7 +10,13 @@ # the directory where your nodenet-data, world-data # native modules and nodefunctions reside -data_directory = ~/micropsi2_data/ +data_directory = ~/micropsi_code + +# optional fine-grained folder configurations overwriting +# the default specified in data_directory +; agent_directory = ~/micropsi_code +; world_directory = ~/micropsi_code +; persistency_directory = ~/micropsi_code # the port on your machine where the micropsi # toolkit is served @@ -21,12 +27,6 @@ port = 6543 # 0.0.0.0 serves for everybody host = localhost -# which server to use. -# a single threaded devel server named "wsgiref" is bundled and runs out of the box -# if you installed requirements.txt, use "cherrypy", a multi-threaded stable server. -# check here for a list of other supported servers: http://bottlepy.org/docs/0.12/deployment.html#switching-the-server-backend -server = wsgiref - # single_agent_mode means that only one nodenet can be loaded at a time. # if you use the minecraft connection, this is recommended ; single_agent_mode = 1 @@ -37,6 +37,13 @@ server = wsgiref # python profiler for recipes and the nodenet-runner ; profile_runner = 1 +# automatically save running nodenets every X steps: +auto_save_intervals = 1000,10000,1000000 + +# port for the webagg backend of matplotlib. 
+webagg_port = 6545 + + [minecraft] # use your minecraft.net username with password, respective diff --git a/configuration.py b/configuration.py index 6ff5d240..738d98f1 100644 --- a/configuration.py +++ b/configuration.py @@ -9,30 +9,65 @@ __date__ = '03.12.12' import os -import configparser import warnings +import configparser +from appdirs import AppDirs + + +def makedirs(path): + if not os.access(path, os.W_OK): + try: + os.makedirs(path) + except OSError as e: + print("Fatal Error: Can not write to the configured data-directory") + raise e + + +dirinfo = AppDirs("MicroPsi Runtime", appauthor=False, roaming=True) + +configini = os.path.join(dirinfo.user_data_dir, "config.ini") +using_default = False + +print("MicroPsi configuration directory: ", dirinfo.user_data_dir) -if os.path.isfile('config.ini'): - filename = 'config.ini' +makedirs(dirinfo.user_data_dir) + +if not os.path.isfile(configini): + if os.path.isfile(os.path.abspath('config.ini')): + configini = os.path.abspath('config.ini') + print("Using local custom config") + else: + configini = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.default.ini") + using_default = True + print("Using default configuration") else: - filename = 'config.default.ini' + print("Using custom configuration") + try: config = configparser.ConfigParser() - with open(filename) as fp: + with open(configini) as fp: config.read_file(fp) except OSError: - warnings.warn('Can not read config from inifile %s' % filename) - raise RuntimeError('Can not read config from inifile %s' % filename) + warnings.warn('Can not read config from inifile %s' % configini) + raise RuntimeError('Can not read config from inifile %s' % configini) -config['micropsi2']['version'] = "0.8-alpha6" +config['micropsi2']['version'] = "0.12-alpha10" config['micropsi2']['apptitle'] = "MicroPsi" -homedir = config['micropsi2']['data_directory'].startswith('~') +data_path = os.path.expanduser(config['micropsi2']['data_directory']) +data_path = 
os.path.abspath(data_path) -if homedir: - data_path = os.path.expanduser(config['micropsi2']['data_directory']) -else: - data_path = config['micropsi2']['data_directory'] +config.add_section('paths') +config['paths']['usermanager_path'] = os.path.join(dirinfo.user_data_dir, 'user-db.json') +config['paths']['server_settings_path'] = os.path.join(dirinfo.user_data_dir, 'server-config.json') + +for key in ['agent_directory', 'world_directory', 'persistency_directory']: + if key in config['micropsi2']: + path = os.path.expanduser(config['micropsi2'][key]) + config['paths'][key] = os.path.abspath(path) + else: + config['paths'][key] = data_path + makedirs(config['paths'][key]) if 'logging' not in config: config['logging'] = {} @@ -41,8 +76,3 @@ if level not in config['logging']: warnings.warn('logging level for %s not set in config.ini - defaulting to WARNING' % level) config['logging'][level] = 'WARNING' - -config.add_section('paths') -config['paths']['data_directory'] = os.path.join(os.path.dirname(__file__), data_path) -config['paths']['usermanager_path'] = os.path.join(os.path.dirname(__file__), 'resources', 'user-db.json') -config['paths']['server_settings_path'] = os.path.join(os.path.dirname(__file__), 'resources', 'server-config.json') diff --git a/conftest.py b/conftest.py index f321b8a9..9d6e7f3a 100644 --- a/conftest.py +++ b/conftest.py @@ -3,6 +3,7 @@ import shutil import pytest import logging +import tempfile try: import theano @@ -11,23 +12,30 @@ engine_defaults = "dict_engine" -testpath = os.path.abspath(os.path.join('.', 'test-data')) -try: - shutil.rmtree(testpath) -except OSError: - pass +directory = tempfile.TemporaryDirectory() +testpath = directory.name +print("test data directory:", testpath) + +from micropsi_core import runtime as micropsi_runtime +from configuration import config as cfg -from micropsi_core import runtime as micropsi -from micropsi_core.runtime import cfg -original_ini_data_directory = cfg['paths']['data_directory'] 
+orig_agent_dir = cfg['paths']['agent_directory'] +orig_world_dir = cfg['paths']['world_directory'] -cfg['paths']['data_directory'] = testpath +cfg['paths']['agent_directory'] = testpath +cfg['paths']['world_directory'] = testpath +cfg['paths']['persistency_directory'] = testpath cfg['paths']['server_settings_path'] = os.path.join(testpath, 'server_cfg.json') cfg['paths']['usermanager_path'] = os.path.join(testpath, 'user-db.json') + +if 'logfile' in cfg['logging']: + del cfg['logging']['logfile'] cfg['micropsi2']['single_agent_mode'] = '' if 'theano' in cfg: cfg['theano']['initial_number_of_nodes'] = '50' - +if 'on_exception' in cfg['micropsi2']: + cfg['micropsi2']['on_exception'] = '' +cfg['micropsi2']['auto_save_intervals'] = '100' world_uid = 'WorldOfPain' nn_uid = 'Testnet' @@ -38,23 +46,36 @@ def pytest_addoption(parser): parser.addoption("--engine", action="store", default=engine_defaults, help="The engine that should be used for this testrun.") parser.addoption("--agents", action="store_true", - help="Only test agents-code from the data_directory") + help="Only runt tests from the agent_directory") + parser.addoption("--worlds", action="store_true", + help="Only runt tests from the world_directory") def pytest_cmdline_main(config): """ called for performing the main command line action. The default implementation will invoke the configure hooks and runtest_mainloop. 
""" if config.getoption('agents'): - config.args = [original_ini_data_directory] - micropsi.initialize(persistency_path=testpath, resource_path=original_ini_data_directory) + config.args = [orig_agent_dir] + config._inicache['python_functions'] = [] + config.addinivalue_line('python_files', '*.py') + config.addinivalue_line('python_functions', '_test*') + config.addinivalue_line('norecursedirs', 'experiments') + cfg['paths']['agent_directory'] = orig_agent_dir + micropsi_runtime.initialize(config=cfg) + elif config.getoption('worlds'): + config.args = [orig_world_dir] + config.addinivalue_line('python_functions', 'test_*') + cfg['paths']['world_directory'] = orig_world_dir + micropsi_runtime.initialize(config=cfg) else: - micropsi.initialize(persistency_path=testpath) - from micropsi_server.micropsi_app import usermanager - - usermanager.create_user('Pytest User', 'test', 'Administrator', uid='Pytest User') - usermanager.start_session('Pytest User', 'test', True) + config.addinivalue_line('python_functions', 'test_*') + micropsi_runtime.initialize(config=cfg) + from micropsi_server.micropsi_app import usermanager + usermanager.create_user('Pytest User', 'test', 'Administrator', uid='Pytest User') + usermanager.start_session('Pytest User', 'test', True) set_logging_levels() + micropsi_runtime.set_runner_properties(1, True) def pytest_configure(config): @@ -63,6 +84,10 @@ def pytest_configure(config): "engine(name): mark test to run only on the specified engine") +def pytest_unconfigure(config): + directory.cleanup() + + def pytest_generate_tests(metafunc): if 'engine' in metafunc.fixturenames: engines = [] @@ -80,87 +105,106 @@ def pytest_runtest_setup(item): engine_marker = engine_marker.args[0] if engine_marker != item.callspec.params['engine']: pytest.skip("test requires engine %s" % engine_marker) + for uid in list(micropsi_runtime.nodenets.keys()): + micropsi_runtime.stop_nodenetrunner(uid) + for uid in list(micropsi_runtime.nodenets.keys()): + 
micropsi_runtime.delete_nodenet(uid) + for uid in list(micropsi_runtime.worlds.keys()): + micropsi_runtime.delete_world(uid) + for item in os.listdir(testpath): - if item != 'worlds' and item != 'nodenets': - path = os.path.join(testpath, item) - if os.path.isdir(path): - shutil.rmtree(path) - else: - os.remove(path) - os.mkdir(os.path.join(testpath, 'Test')) - open(os.path.join(testpath, 'Test', '__init__.py'), 'w').close() - micropsi.reload_native_modules() - micropsi.logger.clear_logs() - micropsi.set_runner_properties(1, 1) + path = os.path.join(testpath, item) + if os.path.isdir(path): + shutil.rmtree(path) + else: + os.remove(path) + + os.mkdir(os.path.join(testpath, 'worlds')) + os.mkdir(os.path.join(testpath, 'nodenets')) + os.mkdir(os.path.join(testpath, 'nodenets', '__autosave__')) + os.mkdir(os.path.join(testpath, 'nodetypes')) + os.mkdir(os.path.join(testpath, 'recipes')) + os.mkdir(os.path.join(testpath, 'operations')) + os.mkdir(os.path.join(testpath, 'nodetypes', 'Test')) + open(os.path.join(testpath, 'nodetypes', 'Test', '__init__.py'), 'w').close() + micropsi_runtime.reload_code() + micropsi_runtime.logger.clear_logs() + micropsi_runtime.set_runner_properties(0, True) set_logging_levels() def pytest_internalerror(excrepr, excinfo): """ called for internal errors. """ - micropsi.kill_runners() - shutil.rmtree(testpath) + micropsi_runtime.kill_runners() + directory.cleanup() def pytest_keyboard_interrupt(excinfo): """ called for keyboard interrupt. 
""" - micropsi.kill_runners() - shutil.rmtree(testpath) + micropsi_runtime.kill_runners() + directory.cleanup() def set_logging_levels(): """ sets the logging levels of the default loggers back to WARNING """ logging.getLogger('system').setLevel(logging.WARNING) logging.getLogger('world').setLevel(logging.WARNING) - micropsi.cfg['logging']['level_agent'] = 'WARNING' + micropsi_runtime.runtime_config['logging']['level_agent'] = 'WARNING' @pytest.fixture(scope="session") def resourcepath(): """ Fixture: the resource path """ - return micropsi.RESOURCE_PATH + return micropsi_runtime.RESOURCE_PATH @pytest.fixture(scope="session") def runtime(): """ Fixture: The micropsi runtime """ - return micropsi + return micropsi_runtime @pytest.yield_fixture(scope="function") -def test_world(request): +def default_world(request): """ Fixture: A test world of type Island """ global world_uid - success, world_uid = micropsi.new_world("World of Pain", "Island", "Pytest User", uid=world_uid) + success, world_uid = micropsi_runtime.new_world("World of Pain", "DefaultWorld", "Pytest User") yield world_uid try: - micropsi.delete_world(world_uid) + micropsi_runtime.delete_world(world_uid) except: pass -@pytest.fixture(scope="function") -def default_world(request): +@pytest.yield_fixture(scope="function") +def default_nodenet(request): """ - Fixture: A test world of type Island + A nodenet with the default engine + Use this for tests that are engine-agnostic """ - for uid in micropsi.worlds: - if micropsi.worlds[uid].data['world_type'] == 'World': - return uid + success, nn_uid = micropsi_runtime.new_nodenet("Defaultnet", owner="Pytest User") + micropsi_runtime.save_nodenet(nn_uid) + yield nn_uid + try: + micropsi_runtime.delete_nodenet(nn_uid) + except: + pass @pytest.yield_fixture(scope="function") -def test_nodenet(request, test_world, engine): +def test_nodenet(request, default_world, engine): """ - Fixture: A completely empty nodenet without a worldadapter + An empty nodenet, with the 
currently tested engine. + Use this for tests that should run in both engines """ global nn_uid - success, nn_uid = micropsi.new_nodenet("Testnet", engine=engine, owner="Pytest User", uid='Testnet') - micropsi.save_nodenet(nn_uid) + success, nn_uid = micropsi_runtime.new_nodenet("Testnet", engine=engine, owner="Pytest User") + micropsi_runtime.save_nodenet(nn_uid) yield nn_uid try: - micropsi.delete_nodenet(nn_uid) + micropsi_runtime.delete_nodenet(nn_uid) except: pass @@ -170,6 +214,6 @@ def node(request, test_nodenet): """ Fixture: A Pipe node with a genloop """ - res, uid = micropsi.add_node(test_nodenet, 'Pipe', [10, 10, 10], name='N1') - micropsi.add_link(test_nodenet, uid, 'gen', uid, 'gen') + res, uid = micropsi_runtime.add_node(test_nodenet, 'Pipe', [10, 10, 10], name='N1') + micropsi_runtime.add_link(test_nodenet, uid, 'gen', uid, 'gen') return uid diff --git a/demo_data/nodenets/b6f40e6417ee11e4bbe920c9d087b4b7.json b/demo_data/nodenets/b6f40e6417ee11e4bbe920c9d087b4b7.json deleted file mode 100644 index a9bc046f..00000000 --- a/demo_data/nodenets/b6f40e6417ee11e4bbe920c9d087b4b7.json +++ /dev/null @@ -1,254 +0,0 @@ -{ - "current_step": 0, - "engine": "dict_engine", - "is_active": false, - "links": { - "9ce2b617-37c2-4ed5-95b1-ed2fd17601c0:gen:gen:3f1d7b8a-789b-40e9-a5f5-665c4ed7db16": { - "certainty": 1, - "source_gate_name": "gen", - "source_node_uid": "9ce2b617-37c2-4ed5-95b1-ed2fd17601c0", - "target_node_uid": "3f1d7b8a-789b-40e9-a5f5-665c4ed7db16", - "target_slot_name": "gen", - "uid": "9ce2b617-37c2-4ed5-95b1-ed2fd17601c0:gen:gen:3f1d7b8a-789b-40e9-a5f5-665c4ed7db16", - "weight": 1 - }, - "f9f75add-93da-4a59-a620-5988f34fba3c:gen:gen:029ba031-24af-4d77-bd6a-6d0210d50d0d": { - "certainty": 1, - "source_gate_name": "gen", - "source_node_uid": "f9f75add-93da-4a59-a620-5988f34fba3c", - "target_node_uid": "029ba031-24af-4d77-bd6a-6d0210d50d0d", - "target_slot_name": "gen", - "uid": 
"f9f75add-93da-4a59-a620-5988f34fba3c:gen:gen:029ba031-24af-4d77-bd6a-6d0210d50d0d", - "weight": 1 - } - }, - "max_coords": { - "x": 0, - "y": 0 - }, - "modulators": {}, - "monitors": { - "b2516fb6a2a211e4832820c9d087b4b7": { - "classname": "NodeMonitor", - "name": "right eye", - "node_uid": "f9f75add-93da-4a59-a620-5988f34fba3c", - "sheaf": "default", - "target": "gen", - "type": "gate", - "uid": "b2516fb6a2a211e4832820c9d087b4b7", - "values": {} - } - }, - "name": "Braitenberg", - "nodes": { - "029ba031-24af-4d77-bd6a-6d0210d50d0d": { - "activation": 0.0, - "gate_activations": { - "gen": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - } - }, - "gate_functions": { - "gen": "identity" - }, - "gate_parameters": { - "gen": { - "amplification": 1.0, - "certainty": 1.0, - "decay": 0.0, - "maximum": 1.0, - "minimum": -1.0, - "rho": 0.0, - "spreadsheaves": 0.0, - "theta": 0.0, - "threshold": 0.0 - } - }, - "index": 4, - "name": "left wheel", - "parameters": { - "datatarget": "engine_l" - }, - "parent_nodespace": "Root", - "position": [ - 250, - 340 - ], - "sheaves": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - }, - "state": {}, - "type": "Actor", - "uid": "029ba031-24af-4d77-bd6a-6d0210d50d0d" - }, - "3f1d7b8a-789b-40e9-a5f5-665c4ed7db16": { - "activation": 0.0, - "gate_activations": { - "gen": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - } - }, - "gate_functions": { - "gen": "identity" - }, - "gate_parameters": { - "gen": { - "amplification": 1.0, - "certainty": 1.0, - "decay": 0.0, - "maximum": 1.0, - "minimum": -1.0, - "rho": 0.0, - "spreadsheaves": 0.0, - "theta": 0.0, - "threshold": 0.0 - } - }, - "index": 5, - "name": "right wheel", - "parameters": { - "datatarget": "engine_r" - }, - "parent_nodespace": "Root", - "position": [ - 512.5, - 335 - ], - "sheaves": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - }, - 
"state": {}, - "type": "Actor", - "uid": "3f1d7b8a-789b-40e9-a5f5-665c4ed7db16" - }, - "9ce2b617-37c2-4ed5-95b1-ed2fd17601c0": { - "activation": 0.0, - "gate_activations": { - "gen": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - } - }, - "gate_functions": { - "gen": "identity" - }, - "gate_parameters": { - "gen": { - "amplification": 1.0, - "certainty": 1.0, - "decay": 0.0, - "maximum": 1.0, - "minimum": -1.0, - "rho": 0.0, - "spreadsheaves": 0.0, - "theta": 0.0, - "threshold": 0.0 - } - }, - "index": 2, - "name": "left eye", - "parameters": { - "datasource": "brightness_l" - }, - "parent_nodespace": "Root", - "position": [ - 257.5, - 118.75 - ], - "sheaves": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - }, - "state": {}, - "type": "Sensor", - "uid": "9ce2b617-37c2-4ed5-95b1-ed2fd17601c0" - }, - "f9f75add-93da-4a59-a620-5988f34fba3c": { - "activation": 0.0, - "gate_activations": { - "gen": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - } - }, - "gate_functions": { - "gen": "identity" - }, - "gate_parameters": { - "gen": { - "amplification": 1.0, - "certainty": 1.0, - "decay": 0.0, - "maximum": 1.0, - "minimum": -1.0, - "rho": 0.0, - "spreadsheaves": 0.0, - "theta": 0.0, - "threshold": 0.0 - } - }, - "index": 3, - "name": "right eye", - "parameters": { - "datasource": "brightness_r" - }, - "parent_nodespace": "Root", - "position": [ - 510, - 112.5 - ], - "sheaves": { - "default": { - "activation": 0.0, - "name": "default", - "uid": "default" - } - }, - "state": {}, - "type": "Sensor", - "uid": "f9f75add-93da-4a59-a620-5988f34fba3c" - } - }, - "nodespaces": { - "Root": { - "index": 0, - "name": "Root", - "parent_nodespace": null, - "position": [ - 0, - 0 - ], - "uid": "Root" - } - }, - "owner": "admin", - "uid": "b6f40e6417ee11e4bbe920c9d087b4b7", - "version": 1, - "world": "d4b3f5740adc11e5b9fe20c9d087b4b7", - "worldadapter": "Braitenberg" -} \ No newline 
at end of file diff --git a/demo_data/worlds/ac7c4fb40adc11e58caa20c9d087b4b7.json b/demo_data/worlds/ac7c4fb40adc11e58caa20c9d087b4b7.json deleted file mode 100644 index 941e7100..00000000 --- a/demo_data/worlds/ac7c4fb40adc11e58caa20c9d087b4b7.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "filename": "/Users/dwelland/workspace/tmp/micropsi_alpha2/./resources/worlds/ac7c4fb40adc11e58caa20c9d087b4b7.json", - "name": "default", - "uid": "ac7c4fb40adc11e58caa20c9d087b4b7", - "version": 1 -} \ No newline at end of file diff --git a/demo_data/worlds/d4b3f5740adc11e5b9fe20c9d087b4b7.json b/demo_data/worlds/d4b3f5740adc11e5b9fe20c9d087b4b7.json deleted file mode 100644 index f4391cc3..00000000 --- a/demo_data/worlds/d4b3f5740adc11e5b9fe20c9d087b4b7.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "agents": { - "b6f40e6417ee11e4bbe920c9d087b4b7": { - "name": "Braitenberg", - "position": [ - 700, - 400 - ], - "type": "Braitenberg", - "uid": "b6f40e6417ee11e4bbe920c9d087b4b7" - } - }, - "assets": { - "background": "island/psi_1.png", - "icons": { - "Boulder": "island/boulder.png", - "Braintree": "island/braintree.png", - "Braitenberg": "island/braitenberg.png", - "Champignon": "island/boletus-edulis.png", - "FlyAgaric": "island/fly-agaris.png", - "Juniper": "island/juniper-berries.png", - "Lightsource": "island/lamp.png", - "Maple": "island/maple.png", - "Menhir": "island/menhir.png", - "PalmTree": "island/palm-tree.png", - "Stone": "island/rock.png", - "Survivor": "island/Micropsi.png", - "Thornbush": "island/unknownbox.png", - "Waterhole": "island/well.png", - "Wirselkraut": "island/wirselkraut.png" - }, - "js": "island/island.js", - "template": "island/island.tpl", - "x": 2048, - "y": 2048 - }, - "available_worldadapters": [ - "Braitenberg", - "Survivor", - "StructuredObjects" - ], - "available_worldobjects": [ - "Wirselkraut", - "Lightsource", - "Default", - "Maple", - "Stone", - "PalmTree", - "Thornbush", - "Juniper", - "Boulder", - "Waterhole", - "Champignon", - "Braintree", - 
"Menhir", - "FlyAgaric" - ], - "current_step": 0, - "filename": "/Users/dwelland/workspace/tmp/micropsi_alpha2/./resources/worlds/d4b3f5740adc11e5b9fe20c9d087b4b7.json", - "name": "Doerner-Island", - "objects": { - "4be1f5260ade11e5a03620c9d087b4b7": { - "name": "", - "orientation": 0.0, - "parameters": null, - "position": [ - 505, - 547 - ], - "type": "Lightsource", - "uid": "4be1f5260ade11e5a03620c9d087b4b7" - } - }, - "owner": "admin", - "uid": "d4b3f5740adc11e5b9fe20c9d087b4b7", - "version": 1, - "world_type": "Island", - "worldadapters": { - "Braitenberg": { - "datasources": [ - "brightness_l", - "brightness_r" - ], - "datatargets": [ - "engine_l", - "engine_r" - ] - }, - "StructuredObjects": { - "datasources": [ - "fov-x", - "fov-y", - "major-newscene", - "fovea-com", - "presence-com", - "fovea-cir", - "presence-cir", - "fovea-ver", - "presence-ver", - "fovea-green", - "presence-green", - "fovea-white", - "presence-white", - "fovea-brown", - "presence-brown", - "fovea-charcoal", - "presence-charcoal", - "fovea-purple", - "presence-purple", - "fovea-navy", - "presence-navy", - "fovea-red", - "presence-red" - ], - "datatargets": [ - "fov_x", - "fov_y", - "fov_reset" - ] - }, - "Survivor": { - "datasources": [ - "body-energy", - "body-water", - "body-integrity" - ], - "datatargets": [ - "action_eat", - "action_drink", - "loco_north", - "loco_south", - "loco_east", - "loco_west" - ] - } - } -} \ No newline at end of file diff --git a/micropsi_core/_runtime_api_monitors.py b/micropsi_core/_runtime_api_monitors.py index 2cfc7afa..bdc1c4ac 100644 --- a/micropsi_core/_runtime_api_monitors.py +++ b/micropsi_core/_runtime_api_monitors.py @@ -10,28 +10,27 @@ import micropsi_core -def add_gate_monitor(nodenet_uid, node_uid, gate, sheaf=None, name=None, color=None): +def add_gate_monitor(nodenet_uid, node_uid, gate, name=None, color=None): """Adds a continuous monitor to the activation of a gate. The monitor will collect the activation value in every calculation step. 
Returns the uid of the new monitor.""" nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) - return nodenet.add_gate_monitor(node_uid, gate, sheaf=sheaf, name=name, color=color) + return nodenet.add_gate_monitor(node_uid, gate, name=name, color=color) -def add_slot_monitor(nodenet_uid, node_uid, slot, sheaf=None, name=None, color=None): +def add_slot_monitor(nodenet_uid, node_uid, slot, name=None, color=None): """Adds a continuous monitor to the activation of a slot. The monitor will collect the activation value in every calculation step. Returns the uid of the new monitor.""" nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) - return nodenet.add_slot_monitor(node_uid, slot, sheaf=sheaf, name=name, color=color) + return nodenet.add_slot_monitor(node_uid, slot, name=name, color=color) -def add_link_monitor(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, property, name, color=None): - """Adds a continuous monitor to a link. You can choose to monitor either weight (default) or certainty - The monitor will collect respective value in every calculation step. +def add_link_monitor(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, name, color=None): + """Adds a continuous weightmonitor to a link. The monitor will record the linkweight in every calculation step. 
Returns the uid of the new monitor.""" nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) - return nodenet.add_link_monitor(source_node_uid, gate_type, target_node_uid, slot_type, property, name, color=color) + return nodenet.add_link_monitor(source_node_uid, gate_type, target_node_uid, slot_type, name, color=color) def add_modulator_monitor(nodenet_uid, modulator, name, color=None): @@ -50,6 +49,13 @@ def add_custom_monitor(nodenet_uid, function, name, color=None): return nodenet.add_custom_monitor(function, name, color=color) +def add_group_monitor(nodenet_uid, nodespace, name, node_name_prefix='', node_uids=[], gate='gen', color=None): + """Adds a group monitor, that tracks the activations of the given group + Returns the uid of the new monitor.""" + nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) + return nodenet.add_group_monitor(nodespace, name, node_name_prefix=node_name_prefix, node_uids=node_uids, gate=gate, color=color) + + def remove_monitor(nodenet_uid, monitor_uid): """Deletes an activation monitor.""" micropsi_core.runtime.get_nodenet(nodenet_uid).remove_monitor(monitor_uid) @@ -62,43 +68,12 @@ def clear_monitor(nodenet_uid, monitor_uid): return True -def export_monitor_data(nodenet_uid, monitor_uid=None, monitor_from=0, monitor_count=-1): - """Returns a string with all currently stored monitor data for the given nodenet.""" - nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) - if monitor_from == 0 and monitor_count > 0: - monitor_count = min(nodenet.current_step + 1, monitor_count) - monitor_from = max(0, nodenet.current_step + 1 - monitor_count) - if monitor_from > 0: - if monitor_count < 1: - monitor_count = (nodenet.current_step + 1 - monitor_from) - elif monitor_from + monitor_count > nodenet.current_step: - monitor_from = max(nodenet.current_step + 1 - monitor_count, 0) - if monitor_uid is not None: - data = nodenet.construct_monitors_dict()[monitor_uid] - if monitor_from > 0 or monitor_count > 0: - values = {} - i = 
monitor_from - while i < monitor_count + monitor_from: - values[i] = data['values'].get(i) - i += 1 - data['values'] = values - else: - data = nodenet.construct_monitors_dict() - if monitor_from > 0 or monitor_count > 0: - for uid in data: - values = {} - i = monitor_from - while i < monitor_count + monitor_from: - values[i] = data[uid]['values'].get(i) - i += 1 - data[uid]['values'] = values - return data - - -def get_monitor_data(nodenet_uid, step=0, monitor_from=0, monitor_count=-1): +def get_monitor_data(nodenet_uid, step=0, from_step=0, count=-1, with_recorders=False): """Returns monitor and nodenet data for drawing monitor plots for the current step, if the current step is newer than the supplied calculation step.""" nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) + if nodenet is None: + return {} data = { 'nodenet_running': nodenet.is_active, 'current_step': nodenet.current_step @@ -106,5 +81,81 @@ def get_monitor_data(nodenet_uid, step=0, monitor_from=0, monitor_count=-1): if step > data['current_step']: return data else: - data['monitors'] = micropsi_core.runtime.export_monitor_data(nodenet_uid, None, monitor_from=monitor_from, monitor_count=monitor_count) + monitor_data = {} + if from_step == 0 and count > 0: + count = min(nodenet.current_step + 1, count) + from_step = max(0, nodenet.current_step + 1 - count) + if from_step > 0: + if count < 1: + count = (nodenet.current_step + 1 - from_step) + elif from_step + count > nodenet.current_step: + from_step = max(nodenet.current_step + 1 - count, 0) + monitor_data = nodenet.construct_monitors_dict() + monitor_data.update(nodenet.construct_adhoc_monitors_dict()) + if from_step > 0 or count > 0: + for uid in monitor_data: + values = {} + i = from_step + while i < count + from_step: + values[i] = monitor_data[uid]['values'].get(i) + i += 1 + monitor_data[uid]['values'] = values + data['monitors'] = monitor_data + if with_recorders: + data['recorders'] = nodenet.construct_recorders_dict() return data + + 
+def add_gate_activation_recorder(nodenet_uid, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) + rec = nodenet.add_gate_activation_recorder(group_definition, name, interval) + return True, rec.uid + + +def add_node_activation_recorder(nodenet_uid, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) + rec = nodenet.add_node_activation_recorder(group_definition, name, interval) + return True, rec.uid + + +def add_linkweight_recorder(nodenet_uid, from_group_definition, to_group_definition, name, interval=1): + """ Adds a linkweight recorder to links between to groups.""" + nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) + rec = nodenet.add_linkweight_recorder(from_group_definition, to_group_definition, name, interval) + return True, rec.uid + + +def remove_recorder(nodenet_uid, recorder_uid): + """Deletes a recorder.""" + micropsi_core.runtime.get_nodenet(nodenet_uid).remove_recorder(recorder_uid) + return True + + +def clear_recorder(nodenet_uid, recorder_uid): + """Leaves the recorder intact, but deletes the current list of stored values.""" + micropsi_core.runtime.get_nodenet(nodenet_uid).get_recorder(recorder_uid).clear() + return True + + +def get_recorder_data(nodenet_uid): + return True, micropsi_core.runtime.get_nodenet(nodenet_uid).construct_recorders_dict() + + +def get_recorder(nodenet_uid, recorder_uid): + return micropsi_core.runtime.get_nodenet(nodenet_uid).get_recorder(recorder_uid) + + +def export_recorders(nodenet_uid, recorder_uids): + """ Returns a bytestream containing an npz export for the given recorders""" + import numpy as np + from io import BytesIO + nodenet = micropsi_core.runtime.get_nodenet(nodenet_uid) + data = {} + stream = BytesIO() + for uid in recorder_uids: + recorder = nodenet.get_recorder(uid) + 
data.update(recorder.export_data()) + np.savez(stream, **data) + return stream.getvalue() diff --git a/micropsi_core/_runtime_api_world.py b/micropsi_core/_runtime_api_world.py index c8a088ed..e0ddc5f6 100644 --- a/micropsi_core/_runtime_api_world.py +++ b/micropsi_core/_runtime_api_world.py @@ -6,6 +6,7 @@ import json import os +import logging import micropsi_core from micropsi_core import tools from micropsi_core.tools import Bunch @@ -23,10 +24,19 @@ def get_available_worlds(owner=None): owner (optional): when submitted, the list is filtered by this owner """ if owner: - return dict((uid, micropsi_core.runtime.worlds[uid]) for uid in micropsi_core.runtime.worlds if - micropsi_core.runtime.worlds[uid].owner == owner) + return dict((uid, micropsi_core.runtime.world_data[uid]) for uid in micropsi_core.runtime.world_data if + micropsi_core.runtime.world_data[uid].get('owner') is None or micropsi_core.runtime.world_data[uid].get('owner') == owner) else: - return micropsi_core.runtime.worlds + return micropsi_core.runtime.world_data + + +def get_world_uid_by_name(name): + """ Returns the uid of the world with the given name, or None if no matching world was found """ + for uid in micropsi_core.runtime.world_data: + if micropsi_core.runtime.world_data[uid]['name'] == name: + return uid + else: + return None def get_world_properties(world_uid): @@ -53,12 +63,19 @@ def get_worldadapters(world_uid, nodenet_uid=None): """ Returns the world adapters available in the given world. 
Provide an optional nodenet_uid of an agent in the given world to obtain datasources and datatargets for the agent's worldadapter""" data = {} + worlddata = micropsi_core.runtime.world_data[world_uid] + supported_worldadapters = get_world_class_from_name(worlddata.get('world_type', 'World')).get_supported_worldadapters() + for name, worldadapter in supported_worldadapters.items(): + data[name] = { + 'name': worldadapter.__name__, + 'description': worldadapter.__doc__, + 'config_options': worldadapter.get_config_options() + } if world_uid in micropsi_core.runtime.worlds: world = micropsi_core.runtime.worlds[world_uid] - for name, worldadapter in world.supported_worldadapters.items(): - data[name] = {'description': worldadapter.__doc__} if nodenet_uid and nodenet_uid in world.agents: agent = world.agents[nodenet_uid] + data[agent.__class__.__name__]['config'] = micropsi_core.runtime.nodenets[nodenet_uid].metadata['worldadapter_config'] data[agent.__class__.__name__]['datasources'] = agent.get_available_datasources() data[agent.__class__.__name__]['datatargets'] = agent.get_available_datatargets() return data @@ -74,9 +91,9 @@ def delete_worldobject(world_uid, object_uid): return micropsi_core.runtime.worlds[world_uid].delete_object(object_uid) -def add_worldobject(world_uid, type, position, orientation=0.0, name="", parameters=None, uid=None): +def add_worldobject(world_uid, type, position, orientation=0.0, name="", parameters=None): return micropsi_core.runtime.worlds[world_uid].add_object(type, position, orientation=orientation, name=name, - parameters=parameters, uid=uid) + parameters=parameters) def set_worldobject_properties(world_uid, uid, position=None, orientation=None, name=None, parameters=None): @@ -88,49 +105,64 @@ def set_worldagent_properties(world_uid, uid, position=None, orientation=None, n return micropsi_core.runtime.worlds[world_uid].set_agent_properties(uid, position, orientation, name, parameters) -def new_world(world_name, world_type, owner="", 
uid=None, config={}): +def new_world(world_name, world_type, owner="admin", config={}): """Creates a new world and registers it. Arguments: - world_name: the name of the world - world_type: the type of the world - owner (optional): the creator of this world + world_name (string): the name of the world + world_type (string): the type of the world + owner (string, optional): the creator of this world + config (dict, optional): configuration for the new world instance Returns world_uid if successful, None if failure """ - if uid is None: - uid = tools.generate_uid() + uid = tools.generate_uid() if world_type.startswith('Minecraft'): for uid in micropsi_core.runtime.worlds: if micropsi_core.runtime.worlds[uid].__class__.__name__.startswith('Minecraft'): - raise RuntimeError("Only one instance of a minecraft world is supported right now") + raise RuntimeError("Only one instance of a minecraft environment is supported right now") + + world_class = get_world_class_from_name(world_type) + + # default missing config values + for item in world_class.get_config_options(): + if item['name'] not in config: + config[item['name']] = item.get('default') filename = os.path.join(micropsi_core.runtime.PERSISTENCY_PATH, micropsi_core.runtime.WORLD_DIRECTORY, uid + ".json") micropsi_core.runtime.world_data[uid] = Bunch(uid=uid, name=world_name, world_type=world_type, filename=filename, - version=1, - owner=owner, config=config) - with open(filename, 'w+') as fp: + version=world.WORLD_VERSION, owner=owner, config=config) + with open(filename, 'w+', encoding="utf-8") as fp: fp.write(json.dumps(micropsi_core.runtime.world_data[uid], sort_keys=True, indent=4)) try: kwargs = micropsi_core.runtime.world_data[uid] - micropsi_core.runtime.worlds[uid] = get_world_class_from_name(world_type)(**kwargs) - except AttributeError: - return False, "World type unknown" + micropsi_core.runtime.worlds[uid] = world_class(**kwargs) + except Exception as e: + os.remove(filename) + raise e return True, uid 
def delete_world(world_uid): """Removes the world with the given uid from the server (and unloads it from memory if it is running.)""" - world = micropsi_core.runtime.worlds[world_uid] - for uid in list(world.agents.keys()): - world.unregister_nodenet(uid) - micropsi_core.runtime.nodenets[uid].worldadapter_instance = None - micropsi_core.runtime.nodenets[uid].world = None - micropsi_core.runtime.worlds[world_uid].__del__() - del micropsi_core.runtime.worlds[world_uid] + + if world_uid not in micropsi_core.runtime.world_data: + raise KeyError("Environment not found") + + # remove a running instance if there should be one + if world_uid in micropsi_core.runtime.worlds: + world = micropsi_core.runtime.worlds[world_uid] + for uid in list(world.agents.keys()): + world.unregister_nodenet(uid) + micropsi_core.runtime.nodenets[uid].worldadapter_instance = None + micropsi_core.runtime.nodenets[uid].world = None + micropsi_core.runtime.worlds[world_uid].__del__() + del micropsi_core.runtime.worlds[world_uid] + + # delete metadata os.remove(micropsi_core.runtime.world_data[world_uid].filename) del micropsi_core.runtime.world_data[world_uid] return True @@ -139,51 +171,98 @@ def delete_world(world_uid): def get_world_view(world_uid, step): """Returns the current state of the world for UI purposes, if current step is newer than the supplied one.""" if world_uid not in micropsi_core.runtime.worlds: - raise KeyError("World not found") + raise KeyError("Environment not found") if world_uid in micropsi_core.runtime.MicropsiRunner.last_world_exception: e = micropsi_core.runtime.MicropsiRunner.last_world_exception[world_uid] del micropsi_core.runtime.MicropsiRunner.last_world_exception[world_uid] - raise Exception("Error while stepping world").with_traceback(e[2]) from e[1] + raise Exception("Error while stepping environment").with_traceback(e[2]) from e[1] if step <= micropsi_core.runtime.worlds[world_uid].current_step: return 
micropsi_core.runtime.worlds[world_uid].get_world_view(step) return {} -def set_world_properties(world_uid, world_name=None, owner=None): +def set_world_properties(world_uid, world_name=None, owner=None, config=None): """Sets the supplied parameters (and only those) for the world with the given uid.""" - if world_uid not in micropsi_core.runtime.worlds: - raise KeyError("World not found") - micropsi_core.runtime.worlds[world_uid].name = world_name - micropsi_core.runtime.worlds[world_uid].owner = owner + if world_uid not in micropsi_core.runtime.world_data: + raise KeyError("Environment not found") + + if world_name is not None: + micropsi_core.runtime.world_data[world_uid]['name'] = world_name + if owner is not None: + micropsi_core.runtime.world_data[world_uid]['owner'] = owner + if config is not None: + micropsi_core.runtime.world_data[world_uid]['config'].update(config) + + filename = os.path.join(micropsi_core.runtime.PERSISTENCY_PATH, micropsi_core.runtime.WORLD_DIRECTORY, world_uid) + with open(filename + '.json', 'w+', encoding="utf-8") as fp: + fp.write(json.dumps(micropsi_core.runtime.world_data[world_uid], sort_keys=True, indent=4)) + + # if this world is running, revert to new settings and re-register agents + if world_uid in micropsi_core.runtime.worlds: + + agent_data = {} + for uid, net in micropsi_core.runtime.nodenets.items(): + if net.world == world_uid: + agent_data[uid] = { + 'nodenet_name': net.name, + 'worldadapter': net.worldadapter, + 'config': net.metadata['worldadapter_config'] + } + + micropsi_core.runtime.revert_world(world_uid) + # re-register all agents: + for uid, data in agent_data.items(): + result, worldadapter_instance = micropsi_core.runtime.worlds[world_uid].register_nodenet(data.pop('worldadapter'), uid, **data) + if result: + micropsi_core.runtime.nodenets[uid].worldadapter_instance = worldadapter_instance + else: + micropsi_core.runtime.nodenets[uid].worldadapter_instance = None + logging.getLogger("system").warning("Could 
not spawn agent %s in environment %s" % (uid, world_uid)) return True def set_world_data(world_uid, data): """ Sets some data for the world. Whatever the world supports""" if world_uid not in micropsi_core.runtime.worlds: - raise KeyError("World not found") + raise KeyError("Environment not found") micropsi_core.runtime.worlds[world_uid].set_user_data(data) return True def revert_world(world_uid): """Reverts the world to the last saved state.""" - data = micropsi_core.runtime.world_data[world_uid] + unload_world(world_uid) + load_world(world_uid) + return True + + +def unload_world(world_uid): if world_uid in micropsi_core.runtime.worlds: micropsi_core.runtime.worlds[world_uid].__del__() del micropsi_core.runtime.worlds[world_uid] - if data.get('world_type'): - micropsi_core.runtime.worlds[world_uid] = get_world_class_from_name(data.world_type)(**data) - else: - micropsi_core.runtime.worlds[world_uid] = world.World(**data) return True +def load_world(world_uid): + if world_uid not in micropsi_core.runtime.worlds: + if world_uid in micropsi_core.runtime.world_data: + data = micropsi_core.runtime.world_data[world_uid] + if "world_type" in data: + try: + micropsi_core.runtime.worlds[world_uid] = get_world_class_from_name(data.world_type)(**data) + except Exception as e: + logging.getLogger("system").error("Could not load world %s: %s - %s" % (data.world_type, e.__class__.__name__, str(e))) + else: + micropsi_core.runtime.worlds[world_uid] = world.World(**data) + return micropsi_core.runtime.worlds.get(world_uid) + + def save_world(world_uid): """Stores the world state on the server.""" - with open(os.path.join(micropsi_core.runtime.PERSISTENCY_PATH, micropsi_core.runtime.WORLD_DIRECTORY, - world_uid) + '.json', 'w+') as fp: - fp.write(json.dumps(micropsi_core.runtime.worlds[world_uid].data, sort_keys=True, indent=4)) + data = micropsi_core.runtime.worlds[world_uid].data + filename = os.path.join(micropsi_core.runtime.PERSISTENCY_PATH, 
micropsi_core.runtime.WORLD_DIRECTORY, world_uid) + with open(filename + '.json', 'w+', encoding="utf-8") as fp: + fp.write(json.dumps(data, sort_keys=True, indent=4)) return True @@ -199,12 +278,12 @@ def import_world(worlddata, owner=None): data['uid'] = tools.generate_uid() else: if data['uid'] in micropsi_core.runtime.worlds: - raise RuntimeError("A world with this ID already exists.") + raise RuntimeError("An environment with this ID already exists.") if owner is not None: data['owner'] = owner filename = os.path.join(micropsi_core.runtime.PERSISTENCY_PATH, micropsi_core.runtime.WORLD_DIRECTORY, data['uid'] + '.json') data['filename'] = filename - with open(filename, 'w+') as fp: + with open(filename, 'w+', encoding="utf-8") as fp: fp.write(json.dumps(data)) micropsi_core.runtime.world_data[data['uid']] = micropsi_core.runtime.parse_definition(data, filename) micropsi_core.runtime.worlds[data['uid']] = get_world_class_from_name( @@ -213,16 +292,23 @@ def import_world(worlddata, owner=None): return data['uid'] -def get_world_class_from_name(world_type): +def get_world_class_from_name(world_type, case_sensitive=True): """Returns the class from a world type, if it is known""" from micropsi_core.world.world import World - - worldclasses = {cls.__name__: cls for cls in tools.itersubclasses(World)} - return worldclasses.get(world_type, World) + if case_sensitive: + return micropsi_core.runtime.world_classes[world_type] + else: + for key in micropsi_core.runtime.world_classes: + if key.lower() == world_type.lower(): + return micropsi_core.runtime.world_classes[key] def get_available_world_types(): """Returns a mapping of the available world type names to their classes""" - import importlib - from micropsi_core.world.world import World - return dict((cls.__name__, cls) for cls in tools.itersubclasses(vars()['World'])) + data = {} + for name, cls in micropsi_core.runtime.world_classes.items(): + data[name] = { + 'class': cls, + 'config': cls.get_config_options(), + } + 
return data diff --git a/micropsi_core/benchmark_system.py b/micropsi_core/benchmark_system.py new file mode 100644 index 00000000..6c5349df --- /dev/null +++ b/micropsi_core/benchmark_system.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 + + +def benchmark_system(n=1000, repeat=100): + + import numpy as np + import theano + import scipy + import timeit + + result = [""] + + result.append("numpy version: %s" % np.__version__) + result.append("scipy version: %s" % scipy.__version__) + result.append("theano version: %s" % theano.__version__) + result.append("") + result.append("theano device: %s" % theano.config.device) + result.append("theano blas: %s" % theano.config.blas.ldflags) + result.append("") + + # numpy dot + setup = "import numpy as np; x = np.random.random(({0}, {0})).astype(np.float32)".format(n) + statement = "np.dot(x, x.T)" + timer = timeit.Timer(statement, setup=setup) + t = timer.timeit(repeat) / repeat + f = 2 * n ** 3 / t / 1e9 + result.append("numpy dot %.4f seconds; flop rate = %.2f Gflops/s" % (t, f)) + + # scipy dot + setup = "import scipy; import numpy as np; x = np.random.random(({0}, {0})).astype(np.float32)".format(n) + statement = "scipy.dot(x, x.T)" + timer = timeit.Timer(statement, setup=setup) + t = timer.timeit(repeat) / repeat + f = 2 * n ** 3 / t / 1e9 + result.append("scipy dot %.4f seconds; flop rate = %.2f Gflops/s" % (t, f)) + + # Fortran dot + setup = "from scipy import linalg; import numpy as np; x = np.random.random(({0}, {0})).astype(np.float32)".format(n) + statement = "linalg.blas.dgemm(1.0, x, x.T)" + timer = timeit.Timer(statement, setup=setup) + t = timer.timeit(repeat) / repeat + f = 2 * n ** 3 / t / 1e9 + result.append("scipy dgemm %.4f seconds; flop rate = %.2f Gflops/s" % (t, f)) + + # theano dot + setup = "import theano; import theano.tensor as T; x = T.matrix(); \ + dot = x.dot(x.T); func = theano.function([x], dot); \ + import numpy as np; x = np.random.random(({0}, {0})).astype(np.float32)".format(n) + statement = 
"func(x)" + timer = timeit.Timer(statement, setup=setup) + t = timer.timeit(repeat) / repeat + f = 2 * n ** 3 / t / 1e9 + result.append("theano dot %.4f seconds; flop rate = %.2f Gflops/s" % (t, f)) + + # theano using a shared variable + setup = "import theano; import numpy as np; x = np.random.random(({0}, {0})).astype(np.float32); \ + X = theano.shared(x); z = theano.tensor.dot(X, X.T); f = theano.function([], z)".format(n) + statement = "f()" + timer = timeit.Timer(statement, setup=setup) + # theano_times_list = timer.repeat(num_repeats, 1) + t = timer.timeit(repeat) / repeat + f = 2 * n ** 3 / t / 1e9 + result.append("theano shared %.4f seconds; flop rate = %.2f Gflops/s" % (t, f)) + + result.append("") + + return "\n".join(result) + + +if __name__ == "__main__": # pragma: no cover + + results = benchmark_system() + print(results) diff --git a/micropsi_core/config.py b/micropsi_core/config.py index e5d62559..1c2afd06 100644 --- a/micropsi_core/config.py +++ b/micropsi_core/config.py @@ -58,7 +58,9 @@ def __init__(self, config_path="config-data.json", auto_save=True): self.key = absolute_path # set up persistence - os.makedirs(os.path.dirname(config_path), exist_ok=True) + dirpath = os.path.dirname(config_path) + if not os.path.isdir(dirpath): + os.makedirs(dirpath, exist_ok=True) self.config_file_name = config_path self.auto_save = auto_save @@ -74,18 +76,18 @@ def __del__(self): def load_configs(self): """load configuration data""" try: - with open(self.config_file_name) as file: + with open(self.config_file_name, encoding="utf-8") as file: self.data = json.load(file) return True except ValueError: - logging.getLogger("system").warn("Could not read config data at %s" % self.config_file_name) + logging.getLogger("system").warning("Could not read config data at %s" % self.config_file_name) except IOError: logging.getLogger("system").info("No readable config data file, attempting to create one") return False def save_configs(self): """saves the config data to a 
file""" - with open(self.config_file_name, mode='w+') as file: + with open(self.config_file_name, mode='w+', encoding="utf-8") as file: json.dump(self.data, file, indent=4) def __setitem__(self, key, value): diff --git a/micropsi_core/emoexpression.py b/micropsi_core/emoexpression.py index d26d95f7..166b59ae 100644 --- a/micropsi_core/emoexpression.py +++ b/micropsi_core/emoexpression.py @@ -22,8 +22,8 @@ def calc_emoexpression_parameters(nodenet): integrity = 1 if nodenet.world is not None and "health" in nodenet.worldadapter_instance.get_available_datasources(): - if nodenet.worldadapter_instance.get_datasource("health"): - integrity = nodenet.worldadapter_instance.get_datasource("health") + if nodenet.worldadapter_instance.get_datasource_value("health"): + integrity = nodenet.worldadapter_instance.get_datasource_value("health") exp_pain = 1 - integrity exp_activation = emo_activation diff --git a/micropsi_core/micropsi_logger.py b/micropsi_core/micropsi_logger.py index 2d0716e3..59c49704 100644 --- a/micropsi_core/micropsi_logger.py +++ b/micropsi_core/micropsi_logger.py @@ -12,6 +12,12 @@ MAX_RECORDS_PER_STORAGE = 1000 +# optionally color log console log output +try: + import coloredlogs +except ImportError: + coloredlogs = None + class RecordWebStorageHandler(logging.Handler): @@ -77,6 +83,8 @@ def __init__(self, default_logging_levels={}, log_to_file=False): datefmt='%d.%m. 
%H:%M:%S' ) + logging.getLogger().handlers = [] + self.log_to_file = log_to_file self.filehandler = None if log_to_file: @@ -85,6 +93,7 @@ def __init__(self, default_logging_levels={}, log_to_file=False): self.filehandler = logging.FileHandler(self.log_to_file, mode='a') formatter = logging.Formatter(self.default_format) self.filehandler.setFormatter(formatter) + self.filehandler.set_name('filehandler') self.register_logger("system", self.logging_levels.get(default_logging_levels.get('system', {}), logging.WARNING)) self.register_logger("world", self.logging_levels.get(default_logging_levels.get('world', {}), logging.WARNING)) @@ -92,15 +101,17 @@ def __init__(self, default_logging_levels={}, log_to_file=False): def register_logger(self, name, level): self.loggers[name] = logging.getLogger(name) self.loggers[name].setLevel(level) + self.loggers[name].propagate = False self.record_storage[name] = [] self.handlers[name] = RecordWebStorageHandler(self.record_storage, name) - formatter = logging.Formatter(self.default_format) self.handlers[name].setFormatter(formatter) logging.getLogger(name).addHandler(self.handlers[name]) if self.filehandler: logging.getLogger(name).addHandler(self.filehandler) self.loggers[name].debug("Logger %s ready" % name) + if coloredlogs is not None: + coloredlogs.install(logger=self.loggers[name], level=level) def unregister_logger(self, name): logging.getLogger(name).removeHandler(self.handlers[name]) diff --git a/micropsi_core/nodenet/dict_engine/dict_link.py b/micropsi_core/nodenet/dict_engine/dict_link.py index dc13ece4..d3102c33 100644 --- a/micropsi_core/nodenet/dict_engine/dict_link.py +++ b/micropsi_core/nodenet/dict_engine/dict_link.py @@ -24,10 +24,6 @@ class DictLink(Link): def weight(self): return self.__weight - @property - def certainty(self): - return self.__certainty - @property def source_node(self): return self.__source_node @@ -44,16 +40,16 @@ def target_node(self): def target_slot(self): return self.__target_slot - def 
__init__(self, source_node, source_gate_name, target_node, target_slot_name, weight=1, certainty=1): + def __init__(self, source_node, source_gate_name, target_node, target_slot_name, weight=1): """create a link between the source_node and the target_node, from the source_gate to the target_slot. Note: you should make sure that no link between source and gate exists. Attributes: weight (optional): the weight of the link (default is 1) """ - self.link(source_node, source_gate_name, target_node, target_slot_name, weight, certainty) + self.link(source_node, source_gate_name, target_node, target_slot_name, weight) - def link(self, source_node, source_gate_name, target_node, target_slot_name, weight=1, certainty=1): + def link(self, source_node, source_gate_name, target_node, target_slot_name, weight=1): """link between source and target nodes, from a gate to a slot. You may call this function to change the connections of an existing link. If the link is already @@ -64,7 +60,6 @@ def link(self, source_node, source_gate_name, target_node, target_slot_name, wei self.__source_gate = source_node.get_gate(source_gate_name) self.__target_slot = target_node.get_slot(target_slot_name) self.__weight = weight - self.__certainty = certainty self.__source_gate._register_outgoing(self) self.__target_slot._register_incoming(self) @@ -75,6 +70,5 @@ def remove(self): self.__source_gate._unregister_outgoing(self) self.__target_slot._unregister_incoming(self) - def _set_weight(self, weight, certainty=1): + def _set_weight(self, weight): self.__weight = weight - self.__certainty = certainty diff --git a/micropsi_core/nodenet/dict_engine/dict_netentity.py b/micropsi_core/nodenet/dict_engine/dict_netentity.py index ac0cbaac..ee8b5878 100644 --- a/micropsi_core/nodenet/dict_engine/dict_netentity.py +++ b/micropsi_core/nodenet/dict_engine/dict_netentity.py @@ -17,7 +17,6 @@ class NetEntity(object): uid: the unique identifier of the net entity index: an attempt at creating an ordering 
criterion for net entities name: a human readable name (optional) - position: a pair of coordinates on the screen nodenet: the node net in which the entity resides parent_nodespace: the node space this entity is contained in """ @@ -38,17 +37,6 @@ def index(self): def index(self, index): self.__index = index - @property - def position(self): - return self.__position - - @position.setter - def position(self, position): - position = list(position) - position = (position + [0] * 3)[:3] - self.__position = position - self.last_changed = self.nodenet.current_step - @property def name(self): return self.__name @@ -61,21 +49,19 @@ def name(self, name): def parent_nodespace(self): return self.__parent_nodespace - def __init__(self, nodenet, parent_nodespace, position, name="", entitytype="abstract_entities", + def __init__(self, nodenet, parent_nodespace, name="", entitytype="abstract_entities", uid=None, index=None): - """create a net entity at a certain position and in a given node space""" + """create a net entity in a given node space""" self.__uid = None self.__index = 0 self.__name = None self.__parent_nodespace = None - self.__position = None self.uid = uid or micropsi_core.tools.generate_uid() self.nodenet = nodenet self.index = index or len(nodenet.get_node_uids()) + len(nodenet.get_nodespace_uids()) self.entitytype = entitytype self.name = name - self.position = position if parent_nodespace: self.__parent_nodespace = parent_nodespace nodespace = self.nodenet.get_nodespace(parent_nodespace) diff --git a/micropsi_core/nodenet/dict_engine/dict_node.py b/micropsi_core/nodenet/dict_engine/dict_node.py index e5ea9934..fb7ac59a 100644 --- a/micropsi_core/nodenet/dict_engine/dict_node.py +++ b/micropsi_core/nodenet/dict_engine/dict_node.py @@ -11,8 +11,6 @@ """ -import copy - from micropsi_core.nodenet.node import Node, Gate, Slot from .dict_link import DictLink from micropsi_core.nodenet.dict_engine.dict_netentity import NetEntity @@ -22,9 +20,6 @@ __date__ = '09.05.12' 
-emptySheafElement = dict(uid="default", name="default", activation=0) - - class DictNode(NetEntity, Node): """A net entity with slots and gates and a node function. @@ -40,97 +35,65 @@ class DictNode(NetEntity, Node): """ @property - def activation(self): - return self.sheaves['default']['activation'] + def position(self): + return self.__position + + @position.setter + def position(self, position): + position = list(position) + position = (position + [0] * 3)[:3] + self.__position = position + self.last_changed = self.nodenet.current_step @property - def activations(self): - return dict((k, v['activation']) for k, v in self.sheaves.items()) + def activation(self): + return self.__activation @activation.setter def activation(self, activation): - self.set_sheaf_activation(activation) - - def set_sheaf_activation(self, activation, sheaf="default"): - sheaves_to_calculate = self.get_sheaves_to_calculate() - if sheaf not in sheaves_to_calculate: - raise "Sheaf " + sheaf + " can not be set as it hasn't been propagated to any slot" - - if activation is None: - activation = 0 - - self.sheaves[sheaf]['activation'] = float(activation) - if 'gen' in self.nodetype.gatetypes: - self.set_gate_activation('gen', activation, sheaf) + #activation_to_set = float(activation) + #gengate = self.get_gate('gen') + #if gengate is not None: + # activation_to_set = gengate.gate_function(float(activation)) + #self.__activation = activation_to_set + self.__activation = float(activation) + gengate = self.get_gate('gen') + if gengate is not None: + gengate.activation = float(activation) def __init__(self, nodenet, parent_nodespace, position, state=None, activation=0, - name="", type="Concept", uid=None, index=None, parameters=None, gate_parameters=None, gate_activations=None, gate_functions=None, **_): - if not gate_parameters: - gate_parameters = {} + name="", type="Concept", uid=None, index=None, parameters=None, gate_activations=None, gate_configuration=None, **_): if nodenet.is_node(uid): 
raise KeyError("Node with uid %s already exists" % uid) - Node.__init__(self, type, nodenet.get_nodetype(type)) + Node.__init__(self, nodenet, type, nodenet.get_nodetype(type)) - NetEntity.__init__(self, nodenet, parent_nodespace, position, + NetEntity.__init__(self, nodenet, parent_nodespace, name=name, entitytype="nodes", uid=uid, index=index) - self.__non_default_gate_parameters = {} + self.position = position self.__state = {} + self.__activation = 0 self.__gates = {} self.__slots = {} - self.__gatefunctions = {} - if gate_functions is None: - gate_functions = {} + self._gate_configuration = gate_configuration or {} + self.__parameters = dict((key, self.nodetype.parameter_defaults.get(key)) for key in self.nodetype.parameters) if parameters is not None: for key in parameters: - if parameters[key] is not None: + if parameters[key] is not None and key in self.nodetype.parameters: self.set_parameter(key, parameters[key]) - for gate_name in gate_parameters: - for key in gate_parameters[gate_name]: - if gate_parameters[gate_name][key] != self.nodetype.gate_defaults[gate_name].get(key, None): - if gate_name not in self.__non_default_gate_parameters: - self.__non_default_gate_parameters[gate_name] = {} - self.__non_default_gate_parameters[gate_name][key] = gate_parameters[gate_name][key] - - gate_parameters = copy.deepcopy(self.nodetype.gate_defaults) - for gate_name in gate_parameters: - if gate_name in self.__non_default_gate_parameters: - gate_parameters[gate_name].update(self.__non_default_gate_parameters[gate_name]) - - gate_parameters_for_validation = copy.deepcopy(gate_parameters) - for gate_name in gate_parameters_for_validation: - for key in gate_parameters_for_validation[gate_name]: - if key in self.nodetype.gate_defaults: - try: - gate_parameters[gate_name][key] = float(gate_parameters[gate_name][key]) - except: - self.logger.warn('Invalid gate parameter value for gate %s, param %s, node %s' % (gate_name, key, self.uid)) - gate_parameters[gate_name][key] = 
self.nodetype.gate_defaults[gate_name].get(key, 0) - else: - gate_parameters[gate_name][key] = float(gate_parameters[gate_name][key]) - for gate in self.nodetype.gatetypes: - if gate not in gate_functions: - self.__gatefunctions[gate] = gatefunctions.identity - else: - self.__gatefunctions[gate] = getattr(gatefunctions, gate_functions[gate]) - if gate_activations is None or gate not in gate_activations: - sheaves_to_use = None - else: - sheaves_to_use = gate_activations[gate] - self.__gates[gate] = DictGate(gate, self, sheaves=sheaves_to_use, parameters=gate_parameters.get(gate)) + self.__gates[gate] = DictGate(gate, self) for slot in self.nodetype.slottypes: self.__slots[slot] = DictSlot(slot, self) if state: self.__state = state nodenet._register_node(self) - self.sheaves = {"default": emptySheafElement.copy()} self.activation = activation def node_function(self): @@ -148,45 +111,21 @@ def node_function(self): # call nodefunction of my node type if self.nodetype and self.nodetype.nodefunction is not None: - sheaves_to_calculate = self.get_sheaves_to_calculate() - - # find node activation to carry over - node_activation_to_carry_over = {} - for id in self.sheaves: - if id in sheaves_to_calculate: - node_activation_to_carry_over[id] = self.sheaves[id] - # clear activation states for gatename in self.get_gate_types(): gate = self.get_gate(gatename) - gate.sheaves = {} - self.sheaves = {} - - # calculate activation states for all open sheaves - for sheaf_id in sheaves_to_calculate: - - # prepare sheaves - for gatename in self.get_gate_types(): - gate = self.get_gate(gatename) - gate.sheaves[sheaf_id] = sheaves_to_calculate[sheaf_id].copy() - if sheaf_id in node_activation_to_carry_over: - self.sheaves[sheaf_id] = node_activation_to_carry_over[sheaf_id].copy() - self.set_sheaf_activation(node_activation_to_carry_over[sheaf_id]['activation'], sheaf_id) - else: - self.sheaves[sheaf_id] = sheaves_to_calculate[sheaf_id].copy() - self.set_sheaf_activation(0, sheaf_id) - - 
# and actually calculate new values for them - for sheaf_id in sheaves_to_calculate: - - try: - self.nodetype.nodefunction(netapi=self.nodenet.netapi, node=self, sheaf=sheaf_id, **self.__parameters) - except Exception: - self.nodenet.is_active = False - self.activation = -1 - raise + gate.__activation = 0 + + #call node function + try: + params = self.clone_parameters() + self.nodetype.nodefunction(netapi=self.nodenet.netapi, node=self, **params) + except Exception: + self.nodenet.is_active = False + self.activation = -1 + raise else: - # default node function (only using the "default" sheaf) + # default node function if len(self.get_slot_types()): self.activation = sum([self.get_slot(slot).activation for slot in self.get_slot_types()]) if len(self.get_gate_types()): @@ -205,89 +144,46 @@ def get_slot(self, slotname): except KeyError: return None - def set_gate_activation(self, gatetype, activation, sheaf="default"): + def set_gate_activation(self, gatetype, activation): """ sets the activation of the given gate""" - activation = float(activation) + if gatetype == 'gen': + self.activation = float(activation) gate = self.get_gate(gatetype) if gate is not None: - gate.sheaves[sheaf]['activation'] = activation - - def get_sheaves_to_calculate(self): - sheaves_to_calculate = {} - for slotname in self.get_slot_types(): - for uid in self.get_slot(slotname).sheaves: - sheaves_to_calculate[uid] = self.get_slot(slotname).sheaves[uid].copy() - sheaves_to_calculate[uid]['activation'] = 0 - if 'default' not in sheaves_to_calculate: - sheaves_to_calculate['default'] = emptySheafElement.copy() - return sheaves_to_calculate - - def get_gate_parameters(self): - """Looks into the gates and returns gate parameters if these are defined""" - gate_parameters = {} - for gatetype in self.get_gate_types(): - if self.get_gate(gatetype).parameters: - gate_parameters[gatetype] = self.get_gate(gatetype).parameters - if len(gate_parameters): - return gate_parameters + gate.activation = 
activation + + def set_gate_configuration(self, gate_type, gatefunction, gatefunction_parameters={}): + gatefuncs = self.nodenet.get_available_gatefunctions() + if gatefunction == 'identity' or gatefunction is None: + self._gate_configuration.pop(gate_type, None) + elif gatefunction in gatefuncs: + for param, default in gatefuncs[gatefunction].items(): + if param not in gatefunction_parameters: + gatefunction_parameters[param] = default + self._gate_configuration[gate_type] = { + 'gatefunction': gatefunction, + 'gatefunction_parameters': gatefunction_parameters + } else: - return None + raise NameError("Unknown Gatefunction") - def clone_non_default_gate_parameters(self, gate_type=None): + def get_gate_configuration(self, gate_type=None): if gate_type is None: - return self.__non_default_gate_parameters.copy() - if gate_type not in self.__non_default_gate_parameters: - return None - return { - gate_type: self.__non_default_gate_parameters[gate_type].copy() - } - - def set_gate_parameter(self, gate_type, parameter, value): - if self.__non_default_gate_parameters is None: - self.__non_default_gate_parameters = {} - if parameter in self.nodetype.gate_defaults[gate_type]: - if value is None: - value = self.nodetype.gate_defaults[gate_type][parameter] - else: - value = float(value) - if value != self.nodetype.gate_defaults[gate_type][parameter]: - if gate_type not in self.__non_default_gate_parameters: - self.__non_default_gate_parameters[gate_type] = {} - self.__non_default_gate_parameters[gate_type][parameter] = value - elif parameter in self.__non_default_gate_parameters.get(gate_type, {}): - del self.__non_default_gate_parameters[gate_type][parameter] - self.get_gate(gate_type).parameters[parameter] = value - - def get_gatefunction(self, gate_type): - if self.get_gate(gate_type): - return self.__gatefunctions[gate_type] - raise KeyError("Wrong Gatetype") - - def get_gatefunction_name(self, gate_type): - if self.get_gate(gate_type): - return 
self.__gatefunctions[gate_type].__name__ - raise KeyError("Wrong Gatetype") - - def set_gatefunction_name(self, gate_type, gatefunction): - if self.get_gate(gate_type): - if gatefunction is None: - self.__gatefunctions[gate_type] = gatefunctions.identity - elif hasattr(gatefunctions, gatefunction): - self.__gatefunctions[gate_type] = getattr(gatefunctions, gatefunction) + return self._gate_configuration + elif self.get_gate(gate_type): + if gate_type in self._gate_configuration: + return self._gate_configuration[gate_type] else: - raise NameError("Unknown Gatefunction") + return { + 'gatefunction': 'identity', + 'gatefunction_parameters': {} + } else: raise KeyError("Wrong Gatetype") - def get_gatefunction_names(self): - ret = {} - for key in self.__gatefunctions: - ret[key] = self.__gatefunctions[key].__name__ - return ret - def reset_slots(self): for slottype in self.get_slot_types(): - self.get_slot(slottype).sheaves = {"default": emptySheafElement.copy()} + self.get_slot(slottype).reset() def get_parameter(self, parameter): if parameter in self.__parameters: @@ -308,14 +204,14 @@ def set_parameter(self, parameter, value): value = self.nodetype.parameter_defaults[parameter] else: value = None - self.__parameters[parameter] = value + if parameter in self.nodetype.parameters: + self.__parameters[parameter] = value + else: + raise NameError("Parameter %s not defined for node %s" % (parameter, str(self))) def clone_parameters(self): return self.__parameters.copy() - def clone_sheaves(self): - return self.sheaves.copy() - def get_state(self, state_element): if state_element in self.__state: return self.__state[state_element] @@ -328,10 +224,10 @@ def set_state(self, state_element, value): def clone_state(self): return self.__state.copy() - def link(self, gate_name, target_node_uid, slot_name, weight=1, certainty=1): - """Ensures a link exists with the given parameters and returns it - Only one link between a node/gate and a node/slot can exist, its parameters will be 
updated with the - given parameters if a link existed prior to the call of this method + def link(self, gate_name, target_node_uid, slot_name, weight=1): + """Ensures a link exists with the given weight and returns it + Only one link between a node/gate and a node/slot can exist, its weight will be updated with the + given value if a link existed prior to the call of this method Will return None if no such link can be created. """ @@ -342,8 +238,8 @@ def link(self, gate_name, target_node_uid, slot_name, weight=1, certainty=1): self.last_changed = self.nodenet.current_step target.last_changed = self.nodenet.current_step - self.nodenet.get_nodespace(self.parent_nodespace).content_last_changed = self.nodenet.current_step - self.nodenet.get_nodespace(target.parent_nodespace).content_last_changed = self.nodenet.current_step + self.nodenet.get_nodespace(self.parent_nodespace).contents_last_changed = self.nodenet.current_step + self.nodenet.get_nodespace(target.parent_nodespace).contents_last_changed = self.nodenet.current_step if slot_name not in target.get_slot_types(): raise ValueError("Node %s has no slot %s" % (target_node_uid, slot_name)) @@ -359,13 +255,13 @@ def link(self, gate_name, target_node_uid, slot_name, weight=1, certainty=1): if link is None: link = DictLink(self, gate_name, target, slot_name) - link._set_weight(weight, certainty) + link._set_weight(weight) return link def unlink_completely(self): """Deletes all links originating from this node or ending at this node""" self.last_changed = self.nodenet.current_step - self.nodenet.get_nodespace(self.parent_nodespace).content_last_changed = self.nodenet.current_step + self.nodenet.get_nodespace(self.parent_nodespace).contents_last_changed = self.nodenet.current_step links_to_delete = set() for gate_name_candidate in self.get_gate_types(): @@ -376,12 +272,12 @@ def unlink_completely(self): links_to_delete.add(link_candidate) for link in links_to_delete: link.target_node.last_changed = 
self.nodenet.current_step - self.nodenet.get_nodespace(link.target_node.parent_nodespace).content_last_changed = self.nodenet.current_step + self.nodenet.get_nodespace(link.target_node.parent_nodespace).contents_last_changed = self.nodenet.current_step link.remove() def unlink(self, gate_name=None, target_node_uid=None, slot_name=None): self.last_changed = self.nodenet.current_step - self.nodenet.get_nodespace(self.parent_nodespace).content_last_changed = self.nodenet.current_step + self.nodenet.get_nodespace(self.parent_nodespace).contents_last_changed = self.nodenet.current_step links_to_delete = set() for gate_name_candidate in self.get_gate_types(): @@ -392,7 +288,7 @@ def unlink(self, gate_name=None, target_node_uid=None, slot_name=None): links_to_delete.add(link_candidate) for link in links_to_delete: link.target_node.last_changed = self.nodenet.current_step - self.nodenet.get_nodespace(link.target_node.parent_nodespace).content_last_changed = self.nodenet.current_step + self.nodenet.get_nodespace(link.target_node.parent_nodespace).contents_last_changed = self.nodenet.current_step link.remove() @@ -402,7 +298,6 @@ class DictGate(Gate): Attributes: type: a string that determines the type of the gate node: the parent node of the gate - sheaves: a dict of sheaves this gate initially has to support parameters: a dictionary of values used by the gate function """ @@ -420,30 +315,23 @@ def empty(self): @property def activation(self): - return self.sheaves['default']['activation'] + return self.__activation - @property - def activations(self): - return dict((k, v['activation']) for k, v in self.sheaves.items()) + @activation.setter + def activation(self, activation): + self.__activation = activation - def __init__(self, type, node, sheaves=None, parameters=None): + def __init__(self, type, node): """create a gate. 
Parameters: type: a string that refers to a node type node: the parent node - parameters: an optional dictionary of parameters for the gate function """ self.__type = type self.__node = node - if sheaves is None: - self.sheaves = {"default": emptySheafElement.copy()} - else: - self.sheaves = {} - for key in sheaves: - self.sheaves[key] = dict(uid=sheaves[key]['uid'], name=sheaves[key]['name'], activation=sheaves[key]['activation']) + self.__activation = 0 self.__outgoing = {} - self.parameters = parameters.copy() self.monitor = None def get_links(self): @@ -458,10 +346,7 @@ def _register_outgoing(self, link): def _unregister_outgoing(self, link): del self.__outgoing[link.signature] - def clone_sheaves(self): - return self.sheaves.copy() - - def gate_function(self, input_activation, sheaf="default"): + def gate_function(self, input_activation): """This function sets the activation of the gate. The gate function should be called by the node function, and can be replaced by different functions @@ -478,41 +363,16 @@ def gate_function(self, input_activation, sheaf="default"): else: gate_factor = 1.0 if gate_factor == 0.0: - self.sheaves[sheaf]['activation'] = 0 + self.__activation = 0.0 return 0 # if the gate is closed, we don't need to execute the gate function - gatefunction = self.__node.get_gatefunction(self.__type) - - if gatefunction: - activation = gatefunction(input_activation, self.parameters.get('rho', 0), self.parameters.get('theta', 0)) - else: - activation = input_activation - - if activation * gate_factor < self.parameters['threshold']: - activation = 0 - else: - activation = activation * self.parameters["amplification"] * gate_factor - - activation = min(self.parameters["maximum"], max(self.parameters["minimum"], activation)) - - self.sheaves[sheaf]['activation'] = activation - - return activation + config = self.__node.get_gate_configuration(self.__type) - def open_sheaf(self, input_activation, sheaf="default"): - """This function opens a new sheaf and 
calls the gate function for the newly opened sheaf - """ - if sheaf is "default": - sheaf_uid_prefix = "default" + "-" - sheaf_name_prefix = "" - else: - sheaf_uid_prefix = sheaf + "-" - sheaf_name_prefix = self.sheaves[sheaf]['name'] + "-" + gatefunction = getattr(gatefunctions, config['gatefunction']) - new_sheaf = dict(uid=sheaf_uid_prefix + self.node.uid, name=sheaf_name_prefix + self.node.name, activation=0) - self.sheaves[new_sheaf['uid']] = new_sheaf + self.__activation = gate_factor * gatefunction(input_activation, **config['gatefunction_parameters']) - self.gate_function(input_activation, new_sheaf['uid']) + return self.__activation class DictSlot(Slot): @@ -540,11 +400,7 @@ def empty(self): @property def activation(self): - return self.sheaves['default']['activation'] - - @property - def activations(self): - return dict((k, v['activation']) for k, v in self.sheaves.items()) + return self.__activation def __init__(self, type, node): """create a slot. @@ -556,14 +412,18 @@ def __init__(self, type, node): self.__type = type self.__node = node self.__incoming = {} - self.sheaves = {"default": emptySheafElement.copy()} + self.__activation = 0 + + def add_activation(self, activation): + self.__activation += activation - def get_activation(self, sheaf="default"): + def reset(self): + self.__activation = 0 + + def get_activation(self): if len(self.__incoming) == 0: return 0 - if sheaf not in self.sheaves: - return 0 - return self.sheaves[sheaf]['activation'] + return self.__activation def get_links(self): return list(self.__incoming.values()) diff --git a/micropsi_core/nodenet/dict_engine/dict_nodenet.py b/micropsi_core/nodenet/dict_engine/dict_nodenet.py index 15e185b5..ecaebd83 100644 --- a/micropsi_core/nodenet/dict_engine/dict_nodenet.py +++ b/micropsi_core/nodenet/dict_engine/dict_nodenet.py @@ -5,7 +5,7 @@ import micropsi_core from micropsi_core.nodenet import monitor -from micropsi_core.nodenet.node import Nodetype +from micropsi_core.nodenet.node import 
Nodetype, FlowNodetype, HighdimensionalNodetype from micropsi_core.nodenet.nodenet import Nodenet, NODENET_VERSION from micropsi_core.nodenet.stepoperators import DoernerianEmotionalModulators from .dict_stepoperators import DictPropagate, DictCalculate @@ -14,10 +14,6 @@ import copy STANDARD_NODETYPES = { - "Nodespace": { - "name": "Nodespace" - }, - "Comment": { "name": "Comment", "symbol": "#", @@ -25,10 +21,10 @@ "shape": "Rectangle" }, - "Register": { - "name": "Register", + "Neuron": { + "name": "Neuron", "slottypes": ["gen"], - "nodefunction_name": "register", + "nodefunction_name": "neuron", "gatetypes": ["gen"] }, "Sensor": { @@ -37,10 +33,10 @@ "nodefunction_name": "sensor", "gatetypes": ["gen"] }, - "Actor": { - "name": "Actor", + "Actuator": { + "name": "Actuator", "parameters": ["datatarget"], - "nodefunction_name": "actor", + "nodefunction_name": "actuator", "slottypes": ["gen"], "gatetypes": ["gen"] }, @@ -54,21 +50,7 @@ "name": "Script", "slottypes": ["gen", "por", "ret", "sub", "sur"], "nodefunction_name": "script", - "gatetypes": ["gen", "por", "ret", "sub", "sur", "cat", "exp", "sym", "ref"], - "gate_defaults": { - "por": { - "threshold": -1 - }, - "ret": { - "threshold": -1 - }, - "sub": { - "threshold": -1 - }, - "sur": { - "threshold": -1 - } - } + "gatetypes": ["gen", "por", "ret", "sub", "sur", "cat", "exp", "sym", "ref"] }, "Pipe": { "name": "Pipe", @@ -95,34 +77,6 @@ "slottypes": ["gen", "por", "gin", "gou", "gfg"], "gatetypes": ["gen", "por", "gin", "gou", "gfg"], "nodefunction_name": "lstm", - "symbol": "◷", - "gate_defaults": { - "gen": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "por": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "gin": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "gou": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "gfg": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - } - } } } @@ -156,7 +110,7 @@ def 
engine(self): def current_step(self): return self._step - def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=None, native_modules={}, use_modulators=True, worldadapter_instance=None): + def __init__(self, persistency_path, name="", worldadapter="Default", world=None, owner="", uid=None, native_modules={}, use_modulators=True, worldadapter_instance=None, version=None): """Create a new MicroPsi agent. Arguments: @@ -166,27 +120,22 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No uid (optional): unique handle of the agent; if none is given, it will be generated """ - super(DictNodenet, self).__init__(name, worldadapter, world, owner, uid, use_modulators=use_modulators, worldadapter_instance=worldadapter_instance) + super().__init__(persistency_path, name, worldadapter, world, owner, uid, native_modules=native_modules, use_modulators=use_modulators, worldadapter_instance=worldadapter_instance, version=version) + + self.nodetypes = {} + for type, data in STANDARD_NODETYPES.items(): + self.nodetypes[type] = Nodetype(nodenet=self, **data) self.stepoperators = [DictPropagate(), DictCalculate()] if self.use_modulators: self.stepoperators.append(DoernerianEmotionalModulators()) self.stepoperators.sort(key=lambda op: op.priority) - self._version = NODENET_VERSION # used to check compatibility of the node net data self._step = 0 self._nodes = {} self._nodespaces = {} - self._nodetypes = {} - for type, data in STANDARD_NODETYPES.items(): - self._nodetypes[type] = Nodetype(nodenet=self, **data) - - self._native_modules = {} - for type, data in native_modules.items(): - self._native_modules[type] = Nodetype(nodenet=self, **data) - self.nodegroups = {} self.initialize_nodenet({}) @@ -195,7 +144,6 @@ def get_data(self, **params): data = super().get_data(**params) data['nodes'] = self.construct_nodes_dict(**params) data['nodespaces'] = self.construct_nodespaces_dict("Root", transitive=True) - data['version'] = 
self._version data['modulators'] = self.construct_modulators_dict() return data @@ -204,7 +152,21 @@ def export_json(self): data['links'] = self.construct_links_list() return data - def get_nodes(self, nodespace_uids=[], include_links=True): + def get_links_for_nodes(self, node_uids): + source_nodes = [self.get_node(uid) for uid in node_uids] + links = {} + nodes = {} + for node in source_nodes: + nodelinks = node.get_associated_links() + for l in nodelinks: + links[l.signature] = l.get_data(complete=True) + if l.source_node.parent_nodespace != node.parent_nodespace: + nodes[l.source_node.uid] = l.source_node.get_data(include_links=False) + if l.target_node.parent_nodespace != node.parent_nodespace: + nodes[l.target_node.uid] = l.target_node.get_data(include_links=False) + return list(links.values()), nodes + + def get_nodes(self, nodespace_uids=[], include_links=True, links_to_nodespaces=[]): """ Returns a dict with contents for the given nodespaces """ @@ -246,17 +208,28 @@ def get_nodes(self, nodespace_uids=[], include_links=True): return data - def save(self, filename): - # dict_engine saves everything to json, just dump the json export - with open(filename, 'w+') as fp: - fp.write(json.dumps(self.export_json(), sort_keys=True, indent=4)) - if os.path.getsize(filename) < 100: - # kind of hacky, but we don't really know what was going on - raise RuntimeError("Error writing nodenet file") - - def load(self, filename): + def save(self, base_path=None, zipfile=None): + if base_path is None: + base_path = self.persistency_path + data = json.dumps(self.export_json(), sort_keys=True, indent=4) + if zipfile: + zipfile.writestr('nodenet.json', data) + else: + filename = os.path.join(base_path, 'nodenet.json') + # dict_engine saves everything to json, just dump the json export + with open(filename, 'w+', encoding="utf-8") as fp: + fp.write(data) + if os.path.getsize(filename) < 100: + # kind of hacky, but we don't really know what was going on + raise RuntimeError("Error 
writing nodenet file") + + def load(self): """Load the node net from a file""" # try to access file + if self._version != NODENET_VERSION: + self.logger.error("Wrong version of nodenet data in nodenet %s, cannot load." % self.uid) + return False + filename = os.path.join(self.persistency_path, 'nodenet.json') with self.netlock: initfrom = {} @@ -264,30 +237,34 @@ def load(self, filename): if os.path.isfile(filename): try: self.logger.info("Loading nodenet %s from file %s", self.name, filename) - with open(filename) as file: + with open(filename, encoding="utf-8") as file: initfrom.update(json.load(file)) except ValueError: - self.logger.warn("Could not read nodenet data") + self.logger.warning("Could not read nodenet data") return False except IOError: - self.logger.warn("Could not open nodenet file") + self.logger.warning("Could not open nodenet file") return False - if self._version == NODENET_VERSION: - self.initialize_nodenet(initfrom) - return True - else: - raise NotImplementedError("Wrong version of nodenet data, cannot import.") - - def remove(self, filename): - os.remove(filename) + self.initialize_nodenet(initfrom) + return True def reload_native_modules(self, native_modules): """ reloads the native-module definition, and their nodefunctions and afterwards reinstantiates the nodenet.""" - self._native_modules = {} + self.native_modules = {} for key in native_modules: - self._native_modules[key] = Nodetype(nodenet=self, **native_modules[key]) + if native_modules[key].get('engine', self.engine) == self.engine: + try: + if native_modules[key].get('flow_module'): + raise NotImplementedError("dict nodenet does not support flow modules") + elif native_modules[key].get('dimensionality'): + raise NotImplementedError("dict nodenet does not support highdimensional native modules") + else: + self.native_modules[key] = Nodetype(nodenet=self, **native_modules[key]) + except Exception as err: + self.logger.error("Can not instantiate node type %s: %s: %s" % (key, 
err.__class__.__name__, str(err))) + saved = self.export_json() self.clear() self.merge_data(saved, keep_uids=True) @@ -299,7 +276,6 @@ def initialize_nodespace(self, id, data): self.initialize_nodespace(data[id]['parent_nodespace'], data) self._nodespaces[id] = DictNodespace(self, data[id].get('parent_nodespace'), - data[id].get('position'), name=data[id].get('name', 'Root'), uid=id, index=data[id].get('index')) @@ -320,7 +296,10 @@ def initialize_nodenet(self, initfrom): # set up nodespaces; make sure that parent nodespaces exist before children are initialized self._nodespaces = {} - self._nodespaces["Root"] = DictNodespace(self, None, [0, 0, 0], name="Root", uid="Root") + self._nodespaces["Root"] = DictNodespace(self, None, name="Root", uid="Root") + + if 'current_step' in initfrom: + self._step = initfrom['current_step'] if len(initfrom) != 0: # now merge in all init data (from the persisted file typically) @@ -334,14 +313,10 @@ def construct_links_list(self): data.extend([l.get_data(complete=True) for l in node.get_gate(g).get_links()]) return data - def construct_nodes_dict(self, max_nodes=-1, **params): + def construct_nodes_dict(self, **params): data = {} - i = 0 for node_uid in self.get_node_uids(): - i += 1 data[node_uid] = self.get_node(node_uid).get_data(**params) - if max_nodes > 0 and i > max_nodes: - break return data def construct_nodespaces_dict(self, nodespace_uid, transitive=False): @@ -370,34 +345,10 @@ def construct_nodespaces_dict(self, nodespace_uid, transitive=False): def get_nodetype(self, type): """ Returns the nodetpype instance for the given nodetype or native_module or None if not found""" - if type in self._nodetypes: - return self._nodetypes[type] + if type in self.nodetypes: + return self.nodetypes[type] else: - return self._native_modules[type] - - def get_nodespace_data(self, nodespace_uid, include_links): - data = { - 'nodes': {}, - 'name': self.name, - 'is_active': self.is_active, - 'current_step': self.current_step, - 
'nodespaces': self.construct_nodespaces_dict(nodespace_uid), - 'world': self.world, - 'worldadapter': self.worldadapter, - 'modulators': self.construct_modulators_dict() - } - followupnodes = [] - nodespace = self.get_nodespace(nodespace_uid) - for uid in nodespace.get_known_ids(entitytype="nodes"): - node = self.get_node(uid) - data['nodes'][uid] = node.get_data(include_links=include_links) - if include_links: - followupnodes.extend(node.get_associated_node_uids()) - if include_links: - for uid in followupnodes: - if uid not in data['nodes']: - data['nodes'][uid] = self.get_node(uid).get_data(include_links=include_links) - return data + return self.native_modules[type] def get_activation_data(self, nodespace_uids=None, rounded=1): activations = {} @@ -412,12 +363,17 @@ def get_activation_data(self, nodespace_uids=None, rounded=1): for uid in node_ids: node = self.get_node(uid) if rounded is None: - activations[uid] = [node.get_gate(gate_name).activation for gate_name in node.get_gate_types()] + act = [node.get_gate(gate_name).activation for gate_name in node.get_gate_types()] + if set(act) != {0}: + activations[uid] = act else: - activations[uid] = [round(node.get_gate(gate_name).activation, rounded) for gate_name in node.get_gate_types()] + act = [round(node.get_gate(gate_name).activation, rounded) for gate_name in node.get_gate_types()] + if set(act) != {0}: + activations[uid] = act return activations def delete_node(self, node_uid): + self.close_figures(node_uid) if node_uid in self._nodespaces: affected_entity_ids = self._nodespaces[node_uid].get_known_ids() for uid in affected_entity_ids: @@ -478,14 +434,10 @@ def merge_data(self, nodenet_data, keep_uids=False): newuid = uid data['uid'] = newuid uidmap[uid] = newuid - if data['type'] not in self._nodetypes and data['type'] not in self._native_modules: - self.logger.warn("Invalid nodetype %s for node %s" % (data['type'], uid)) - data['parameters'] = { - 'comment': 'There was a %s node here' % data['type'] - } 
- data['type'] = 'Comment' - data.pop('gate_parameters', '') + if data['type'] not in self.nodetypes and data['type'] not in self.native_modules: + self.logger.error("Invalid nodetype %s for node %s" % (data['type'], uid)) invalid_nodes.append(uid) + continue self._nodes[newuid] = DictNode(self, **data) # merge in links @@ -496,13 +448,16 @@ def merge_data(self, nodenet_data, keep_uids=False): for link in links: if link['source_node_uid'] in invalid_nodes or link['target_node_uid'] in invalid_nodes: continue - self.create_link( - uidmap[link['source_node_uid']], - link['source_gate_name'], - uidmap[link['target_node_uid']], - link['target_slot_name'], - link['weight'] - ) + try: + self.create_link( + uidmap[link['source_node_uid']], + link['source_gate_name'], + uidmap[link['target_node_uid']], + link['target_slot_name'], + link['weight'] + ) + except ValueError: + self.logger.warning("Invalid link data") for monitorid in nodenet_data.get('monitors', {}): data = nodenet_data['monitors'][monitorid] @@ -515,11 +470,7 @@ def merge_data(self, nodenet_data, keep_uids=False): mon = getattr(monitor, data['classname'])(self, **data) self._monitors[mon.uid] = mon else: - self.logger.warn('unknown classname for monitor: %s (uid:%s) ' % (data['classname'], monitorid)) - else: - # Compatibility mode - mon = monitor.NodeMonitor(self, name=data['node_name'], **data) - self._monitors[mon.uid] = mon + self.logger.warning('unknown classname for monitor: %s (uid:%s) ' % (data['classname'], monitorid)) def step(self): """perform a calculation step""" @@ -537,8 +488,9 @@ def step(self): break else: del self.deleted_items[i] + self.user_prompt_response = {} - def create_node(self, nodetype, nodespace_uid, position, name="", uid=None, parameters=None, gate_parameters=None): + def create_node(self, nodetype, nodespace_uid, position, name="", uid=None, parameters=None, gate_configuration=None): nodespace_uid = self.get_nodespace(nodespace_uid).uid node = DictNode( self, @@ -547,12 +499,12 
@@ def create_node(self, nodetype, nodespace_uid, position, name="", uid=None, para type=nodetype, uid=uid, parameters=parameters, - gate_parameters=gate_parameters) + gate_configuration=gate_configuration) return node.uid - def create_nodespace(self, parent_uid, position, name="", uid=None, options=None): + def create_nodespace(self, parent_uid, name="", uid=None, options=None): parent_uid = self.get_nodespace(parent_uid).uid - nodespace = DictNodespace(self, parent_uid, position=position, name=name, uid=uid) + nodespace = DictNodespace(self, parent_uid, name=name, uid=uid) return nodespace.uid def get_node(self, uid): @@ -580,13 +532,11 @@ def is_node(self, uid): def is_nodespace(self, uid): return uid in self._nodespaces - def set_entity_positions(self, positions): + def set_node_positions(self, positions): """ Sets the position of nodes or nodespaces """ for uid in positions: if uid in self._nodes: self._nodes[uid].position = positions[uid] - elif uid in self._nodespaces: - self._nodespaces[uid].position = positions[uid] def get_nativemodules(self, nodespace=None): """Returns a dict of native modules. Optionally filtered by the given nodespace""" @@ -617,30 +567,30 @@ def get_sensors(self, nodespace=None, datasource=None): sensors[uid] = self._nodes[uid] return sensors - def get_actors(self, nodespace=None, datatarget=None): - """Returns a dict of all sensor nodes. Optionally filtered by the given nodespace""" + def get_actuators(self, nodespace=None, datatarget=None): + """Returns a dict of all actuator nodes. 
Optionally filtered by the given nodespace""" nodes = self._nodes if nodespace is None else self._nodespaces[nodespace].get_known_ids('nodes') - actors = {} + actuators = {} for uid in nodes: - if self._nodes[uid].type == 'Actor': + if self._nodes[uid].type == 'Actuator': if datatarget is None or self._nodes[uid].get_parameter('datatarget') == datatarget: - actors[uid] = self._nodes[uid] - return actors + actuators[uid] = self._nodes[uid] + return actuators - def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): + def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): """Set weight of the given link.""" source_node = self.get_node(source_node_uid) if source_node is None: return False - link = source_node.link(gate_type, target_node_uid, slot_type, weight, certainty) + link = source_node.link(gate_type, target_node_uid, slot_type, weight) if link is None: return False else: return True - def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): + def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): """Creates a new link. Arguments. 
@@ -649,7 +599,6 @@ def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, we target_node_uid: uid of the target node slot_type: type of the target slot weight: the weight of the link (a float) - certainty (optional): a probabilistic parameter for the link Returns: the link if successful, @@ -660,7 +609,7 @@ def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, we if source_node is None: return False, None - source_node.link(gate_type, target_node_uid, slot_type, weight, certainty) + source_node.link(gate_type, target_node_uid, slot_type, weight) return True def delete_link(self, source_node_uid, gate_type, target_node_uid, slot_type): @@ -724,7 +673,7 @@ def ungroup_nodes(self, nodespace_uid, group): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid - if group in self.nodegroups: + if group in self.nodegroups[nodespace_uid]: del self.nodegroups[nodespace_uid][group] def get_activations(self, nodespace_uid, group): @@ -751,20 +700,28 @@ def set_activations(self, nodespace_uid, group, new_activations): for i in range(len(nodes)): nodes[i].set_gate_activation(gate, new_activations[i]) - def get_thetas(self, nodespace_uid, group): + def get_gate_configurations(self, nodespace_uid, group, gatefunction_parameter=None): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid if group not in self.nodegroups[nodespace_uid]: raise ValueError("Group %s does not exist in nodespace %s" % (group, nodespace_uid)) - thetas = [] nodes = self.nodegroups[nodespace_uid][group][0] gate = self.nodegroups[nodespace_uid][group][1] + data = {'gatefunction': set()} + if gatefunction_parameter: + data['parameter_values'] = [] for node in nodes: - thetas.append(node.get_gate(gate).get_parameter('theta')) - return thetas + config = node.get_gate_configuration(gate) + data['gatefunction'].add(config['gatefunction']) + if gatefunction_parameter is not None: + 
data['parameter_values'].append(config['gatefunction_parameters'].get(gatefunction_parameter, 0)) + if len(data['gatefunction']) > 1: + raise RuntimeError("Heterogenous gatefunction configuration") + data['gatefunction'] = data['gatefunction'].pop() + return data - def set_thetas(self, nodespace_uid, group, thetas): + def set_gate_configurations(self, nodespace_uid, group, gatefunction, gatefunction_parameter=None, parameter_values=None): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid @@ -773,7 +730,10 @@ def set_thetas(self, nodespace_uid, group, thetas): nodes = self.nodegroups[nodespace_uid][group][0] gate = self.nodegroups[nodespace_uid][group][1] for i in range(len(nodes)): - nodes[i].set_gate_parameter(gate, 'theta', thetas[i]) + parameter = {} + if gatefunction_parameter: + parameter[gatefunction_parameter] = parameter_values[i] + nodes[i].set_gate_configuration(gate, gatefunction, parameter) def get_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, group_to): if nodespace_from_uid is None: @@ -820,23 +780,51 @@ def set_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro to_slot = self.nodegroups[nodespace_to_uid][group_to][1] from_nodes = self.nodegroups[nodespace_from_uid][group_from][0] from_gate = self.nodegroups[nodespace_from_uid][group_from][1] - for row in range(len(to_nodes)): - to_node = to_nodes[row] - for column in range(len(from_nodes)): - from_node = from_nodes[column] - weight = new_w[row][column] - if weight != 0: - self.set_link_weight(from_node.uid, from_gate, to_node.uid, to_slot, weight) - else: - self.delete_link(from_node.uid, from_gate, to_node.uid, to_slot) + + if type(new_w) == int and new_w == 1: + if len(from_nodes) != len(to_nodes): + raise ValueError("from_elements and to_elements need to have equal lengths for identity links") + for i in range(len(to_nodes)): + self.set_link_weight( + from_nodes[i].uid, + from_gate, + to_nodes[i].uid, + to_slot, + 1 + ) + + else: + 
for row in range(len(to_nodes)): + to_node = to_nodes[row] + for column in range(len(from_nodes)): + from_node = from_nodes[column] + weight = new_w[row][column] + if weight != 0: + self.set_link_weight(from_node.uid, from_gate, to_node.uid, to_slot, weight) + else: + self.delete_link(from_node.uid, from_gate, to_node.uid, to_slot) def get_available_gatefunctions(self): """ - Returns a list of available gate functions + Returns a dict of the available gatefunctions and their parameters and parameter-defaults """ - from inspect import getmembers, isfunction + import inspect from micropsi_core.nodenet import gatefunctions - return sorted([name for name, func in getmembers(gatefunctions, isfunction)]) + data = {} + for name, func in inspect.getmembers(gatefunctions, inspect.isfunction): + sig = inspect.signature(func) + data[name] = {} + skip = True + for key in sig.parameters: + if skip: + # first param is input_activation. skip + skip = False + continue + default = sig.parameters[key].default + if default == inspect.Signature.empty: + default = None + data[name][key] = default + return data def has_nodespace_changes(self, nodespace_uids=[], since_step=0): if nodespace_uids == []: @@ -847,7 +835,7 @@ def has_nodespace_changes(self, nodespace_uids=[], since_step=0): return True return False - def get_nodespace_changes(self, nodespace_uids=[], since_step=0): + def get_nodespace_changes(self, nodespace_uids=[], since_step=0, include_links=True): result = { 'nodes_dirty': {}, 'nodespaces_dirty': {}, @@ -869,10 +857,11 @@ def get_nodespace_changes(self, nodespace_uids=[], since_step=0): for uid in self.get_nodespace(nsuid).get_known_ids(): if uid not in result['nodes_deleted'] and self.is_node(uid): if self.get_node(uid).last_changed >= since_step: - result['nodes_dirty'][uid] = self.get_node(uid).get_data(include_links=True) - for assoc in self.get_node(uid).get_associated_node_uids(): - if self.get_node(assoc).parent_nodespace not in nodespace_uids and assoc not in 
result['nodes_dirty']: - result['nodes_dirty'][assoc] = self.get_node(assoc).get_data(include_links=True) + result['nodes_dirty'][uid] = self.get_node(uid).get_data(include_links=include_links) + if include_links: + for assoc in self.get_node(uid).get_associated_node_uids(): + if self.get_node(assoc).parent_nodespace not in nodespace_uids and assoc not in result['nodes_dirty']: + result['nodes_dirty'][assoc] = self.get_node(assoc).get_data(include_links=include_links) elif uid not in result['nodespaces_deleted'] and self.is_nodespace(uid): if self.get_nodespace(uid).last_changed >= since_step: diff --git a/micropsi_core/nodenet/dict_engine/dict_nodespace.py b/micropsi_core/nodenet/dict_engine/dict_nodespace.py index 547f1e7f..20b5d767 100644 --- a/micropsi_core/nodenet/dict_engine/dict_nodespace.py +++ b/micropsi_core/nodenet/dict_engine/dict_nodespace.py @@ -23,12 +23,12 @@ class DictNodespace(NetEntity, Nodespace): netentities: a dictionary containing all the contained nodes and nodespaces, to speed up drawing """ - def __init__(self, nodenet, parent_nodespace, position, name="", uid=None, index=None): + def __init__(self, nodenet, parent_nodespace, name="", uid=None, index=None): """create a node space at a given position and within a given node space""" self.__activators = {} self.__netentities = {} uid = uid or micropsi_core.tools.generate_uid() - NetEntity.__init__(self, nodenet, parent_nodespace, position, name, "nodespaces", uid, index) + NetEntity.__init__(self, nodenet, parent_nodespace, name, "nodespaces", uid, index) self.last_changed = nodenet.current_step self.contents_last_changed = nodenet.current_step nodenet._register_nodespace(self) diff --git a/micropsi_core/nodenet/dict_engine/dict_stepoperators.py b/micropsi_core/nodenet/dict_engine/dict_stepoperators.py index c7648009..2959d58f 100644 --- a/micropsi_core/nodenet/dict_engine/dict_stepoperators.py +++ b/micropsi_core/nodenet/dict_engine/dict_stepoperators.py @@ -17,38 +17,12 @@ def 
execute(self, nodenet, nodes, netapi): for uid, node in nodes.items(): node.reset_slots() - # propagate sheaf existence - for uid, node in nodes.items(): - for gate_type in node.get_gate_types(): - gate = node.get_gate(gate_type) - if gate.get_parameter('spreadsheaves'): - for sheaf in gate.sheaves: - for link in gate.get_links(): - for slotname in link.target_node.get_slot_types(): - if sheaf not in link.target_node.get_slot(slotname).sheaves and link.target_node.type != "Actor": - link.target_node.get_slot(slotname).sheaves[sheaf] = dict( - uid=gate.sheaves[sheaf]['uid'], - name=gate.sheaves[sheaf]['name'], - activation=0) - # propagate activation for uid, node in nodes.items(): for gate_type in node.get_gate_types(): gate = node.get_gate(gate_type) for link in gate.get_links(): - for sheaf in gate.sheaves: - targetsheaf = sheaf - if link.target_node.type != "Pipe": - targetsheaf = "default" - - if targetsheaf in link.target_slot.sheaves: - link.target_slot.sheaves[targetsheaf]['activation'] += \ - float(gate.sheaves[sheaf]['activation']) * float(link.weight) # TODO: where's the string coming from? - elif sheaf.endswith(link.target_node.uid): - targetsheaf = sheaf[:-(len(link.target_node.uid) + 1)] - link.target_slot.sheaves[targetsheaf]['activation'] += \ - float(gate.sheaves[sheaf]['activation']) * float(link.weight) # TODO: where's the string coming from? - + link.target_slot.add_activation(float(gate.activation) * float(link.weight)) # TODO: where's the string coming from? 
class DictCalculate(Calculate): """ diff --git a/micropsi_core/nodenet/gatefunctions.py b/micropsi_core/nodenet/gatefunctions.py index 6064a183..e9d3a431 100644 --- a/micropsi_core/nodenet/gatefunctions.py +++ b/micropsi_core/nodenet/gatefunctions.py @@ -6,17 +6,38 @@ # identity, abs, sigmoid, "return 0.0 if x == 0.0 else 1.0 / x" -def identity(input_activation, rho, theta): +def identity(input_activation): return input_activation -def absolute(input_activation, rho, theta): +def absolute(input_activation): return abs(input_activation) -def sigmoid(input_activation, rho, theta): - return 1.0 / (1.0 + math.exp(-(theta + input_activation))) +def sigmoid(input_activation, bias=0): + return 1.0 / (1.0 + math.exp(-(bias + input_activation))) -def one_over_x(input_activation, rho, theta): +def elu(input_activation, bias=0): + input_activation += bias + if input_activation <= 0: + return math.exp(input_activation) - 1. + else: + return input_activation + + +def relu(input_activation, bias=0): + return max(0, input_activation + bias) + + +def one_over_x(input_activation): return 0.0 if input_activation == 0.0 else 1.0 / input_activation + + +def threshold(input_activation, minimum=0.0, maximum=1.0, threshold=0.0, amplification=1.0): + act = input_activation + if act < threshold: + return 0 + act *= amplification + act = min(maximum, max(act, minimum)) + return act diff --git a/micropsi_core/nodenet/link.py b/micropsi_core/nodenet/link.py index 754dca60..201c6c0a 100644 --- a/micropsi_core/nodenet/link.py +++ b/micropsi_core/nodenet/link.py @@ -27,16 +27,6 @@ def weight(self): """ pass # pragma: no cover - @property - @abstractmethod - def certainty(self): - """ - Returns the certainty value of this link. - Note that this is not being used right now and defined/reserved for future use. 
- Implementations can always return 1 for the time being - """ - pass # pragma: no cover - @property @abstractmethod def source_node(self): @@ -72,7 +62,6 @@ def target_slot(self): def get_data(self, complete=False, **_): data = { "weight": self.weight, - "certainty": self.certainty, "target_slot_name": self.target_slot.type, "target_node_uid": self.target_node.uid, } diff --git a/micropsi_core/nodenet/monitor.py b/micropsi_core/nodenet/monitor.py index 95e99e17..8de9e668 100644 --- a/micropsi_core/nodenet/monitor.py +++ b/micropsi_core/nodenet/monitor.py @@ -4,6 +4,7 @@ Monitor definition """ +import math import random import micropsi_core.tools from abc import ABCMeta, abstractmethod @@ -23,80 +24,118 @@ class Monitor(metaclass=ABCMeta): values: the observed values """ - def __init__(self, nodenet, name='', uid=None, color=None): + def __init__(self, nodenet, name='', uid=None, color=None, values={}): self.uid = uid or micropsi_core.tools.generate_uid() self.nodenet = nodenet self.values = {} + for key in sorted(values.keys()): + self.values[int(key)] = values[key] self.name = name or "some monitor" self.color = color or "#%02d%02d%02d" % (random.randint(0,99), random.randint(0,99), random.randint(0,99)) - def get_data(self): - return { + def get_data(self, with_values=True): + data = { "uid": self.uid, - "values": self.values, "name": self.name, "color": self.color, "classname": self.__class__.__name__ } + if with_values: + data["values"] = self.values + return data @abstractmethod - def step(self, step): + def getvalue(self): pass # pragma: no cover + def step(self, step): + self.values[step] = self.getvalue() + def clear(self): self.values = {} +class GroupMonitor(Monitor): + + def __init__(self, nodenet, nodespace, name, name_prefix='', node_uids=[], gate='gen', uid=None, color=None, values={}, **_): + super().__init__(nodenet, name=name, uid=uid, color=color, values=values) + self.nodespace = nodespace + self.node_uids = node_uids + self.name_prefix = 
name_prefix + self.gate = gate + if len(node_uids) == 0: + self.nodenet.group_nodes_by_names(nodespace, name_prefix, gatetype=gate, group_name=name) + self.node_uids = self.nodenet.get_node_uids(nodespace, name) + else: + self.nodenet.group_nodes_by_ids(nodespace, node_uids, name, gatetype=gate) + + def get_data(self, with_values=True): + data = super().get_data(with_values=with_values) + data.update({ + "nodespace": self.nodespace, + "node_uids": self.node_uids, + "gate": self.gate + }) + return data + + def getvalue(self): + data = self.nodenet.get_activations(self.nodespace, self.name) + if type(data) == list: + return data + else: + return data.tolist() + + class NodeMonitor(Monitor): - def __init__(self, nodenet, node_uid, type, target, sheaf=None, name=None, uid=None, color=None, **_): + def __init__(self, nodenet, node_uid, type, target, name=None, uid=None, color=None, values={}, **_): name = name or "%s %s @ Node %s" % (type, target, nodenet.get_node(node_uid).name or nodenet.get_node(node_uid).uid) - super(NodeMonitor, self).__init__(nodenet, name, uid, color=color) + super(NodeMonitor, self).__init__(nodenet, name, uid, color=color, values=values) self.node_uid = node_uid self.type = type self.target = target or 'gen' - self.sheaf = sheaf or 'default' - def get_data(self): - data = super().get_data() + def get_data(self, with_values=True): + data = super().get_data(with_values=with_values) data.update({ "node_uid": self.node_uid, "type": self.type, - "target": self.target, - "sheaf": self.sheaf, + "target": self.target }) return data - def step(self, step): + def getvalue(self): + value = None if self.nodenet.is_node(self.node_uid): if self.type == 'gate' and self.target in self.nodenet.get_node(self.node_uid).get_gate_types(): - self.values[step] = self.nodenet.get_node(self.node_uid).get_gate(self.target).activations[self.sheaf] + value = self.nodenet.get_node(self.node_uid).get_gate(self.target).activation if self.type == 'slot' and self.target in 
self.nodenet.get_node(self.node_uid).get_slot_types(): - self.values[step] = self.nodenet.get_node(self.node_uid).get_slot(self.target).activations[self.sheaf] + value = self.nodenet.get_node(self.node_uid).get_slot(self.target).activation + + if value is not None and not math.isnan(value): + return value else: - self.values[step] = None + return None class LinkMonitor(Monitor): - def __init__(self, nodenet, source_node_uid, gate_type, target_node_uid, slot_type, property=None, name=None, uid=None, color=None, **_): + def __init__(self, nodenet, source_node_uid, gate_type, target_node_uid, slot_type, name=None, uid=None, color=None, values={}, **_): api = nodenet.netapi name = name or "%s:%s -> %s:%s" % (api.get_node(source_node_uid).name, gate_type, api.get_node(source_node_uid).name, slot_type) - super(LinkMonitor, self).__init__(nodenet, name, uid, color=color) + super(LinkMonitor, self).__init__(nodenet, name, uid, color=color, values=values) self.source_node_uid = source_node_uid self.target_node_uid = target_node_uid self.gate_type = gate_type self.slot_type = slot_type - self.property = property or 'weight' - def get_data(self): - data = super().get_data() + def get_data(self, with_values=True): + data = super().get_data(with_values=with_values) data.update({ "source_node_uid": self.source_node_uid, "target_node_uid": self.target_node_uid, "gate_type": self.gate_type, - "slot_type": self.slot_type, - "property": self.property, + "slot_type": self.slot_type }) return data @@ -110,46 +149,64 @@ def find_link(self): return l return None - def step(self, step): + def getvalue(self): link = self.find_link() if link: - self.values[step] = getattr(self.find_link(), self.property) + return self.find_link().weight else: - self.values[step] = None + return None class ModulatorMonitor(Monitor): - def __init__(self, nodenet, modulator, name=None, uid=None, color=None, **_): + def __init__(self, nodenet, modulator, name=None, uid=None, color=None, values={}, **_): name = 
name or "Modulator: %s" % modulator - super(ModulatorMonitor, self).__init__(nodenet, name, uid, color=color) + super(ModulatorMonitor, self).__init__(nodenet, name, uid, color=color, values=values) self.modulator = modulator self.nodenet = nodenet - def get_data(self): - data = super().get_data() + def get_data(self, with_values=True): + data = super().get_data(with_values=with_values) data.update({ "modulator": self.modulator }) return data - def step(self, step): - self.values[step] = self.nodenet.get_modulator(self.modulator) + def getvalue(self): + return self.nodenet.get_modulator(self.modulator) class CustomMonitor(Monitor): - def __init__(self, nodenet, function, name=None, uid=None, color=None, **_): - super(CustomMonitor, self).__init__(nodenet, name, uid, color=color) + def __init__(self, nodenet, function, name=None, uid=None, color=None, values={}, **_): + super(CustomMonitor, self).__init__(nodenet, name, uid, color=color, values=values) self.function = function self.compiled_function = micropsi_core.tools.create_function(self.function, parameters="netapi") - def get_data(self): - data = super().get_data() + def get_data(self, with_values=True): + data = super().get_data(with_values=with_values) data.update({ "function": self.function, }) return data - def step(self, step): - self.values[step] = self.compiled_function(self.nodenet.netapi) + def getvalue(self): + return self.compiled_function(self.nodenet.netapi) + + +class AdhocMonitor(Monitor): + + def __init__(self, nodenet, function, name=None, uid=None, color=None, values={}, parameters={}, **_): + super().__init__(nodenet, name, uid, color=color, values=values) + self.function = function + self.parameters = parameters + + def get_data(self, with_values=True): + data = super().get_data(with_values=with_values) + data.update({ + "function": "%s.%s" % (self.function.__module__, self.function.__name__) + }) + return data + + def getvalue(self): + return self.function(**self.parameters) diff --git 
a/micropsi_core/nodenet/native_modules.py b/micropsi_core/nodenet/native_modules.py deleted file mode 100644 index b8cc049a..00000000 --- a/micropsi_core/nodenet/native_modules.py +++ /dev/null @@ -1,535 +0,0 @@ -""" -Builtin native modules - -Currently contains - * GradientDescent for 3 layers (input, hidden, outpu) - * GradientDescent for LSTMS -""" - -import os - -nodetypes = {} - -try: - import numpy as np - import theano - numpy_installed = True -except ImportError: - numpy_installed = False - - -if numpy_installed: - # only register these native modules if we - # have theano and numpy installed. - nodetypes["GradientDescent"] = { - "name": "GradientDescent", - "engine": "theano_engine", - "slottypes": ["gen"], - "gatetypes": ["gen"], - "nodefunction_name": "gradient_descent", - "symbol": "☲", - "category": "nn_learning", - "path": os.path.abspath(__file__), - "parameters": [ - "ae_type", - "adadelta_rho", - "adadelta_eps", - "check_grad", - "weight_decay", - "tied_weights", - "sparsity_value", - "sparsity_penalty", - "t", - "ctr", - "input_prefix", - "hidden_prefix", - "output_prefix", - "input_nodespace" - ], - "parameter_values": { - "ae_type": ["sparse", "denoising"], - "tied_weights": ["True", "False"], - "check_grad": ["yes", "no"] - }, - "parameter_defaults": { - "ae_type": "denoising", - "tied_weights": "True", - "hidden_prefix": "hidden_1", - "output_prefix": "output_1" - } - } - - -def gradient_descent(netapi, node=None, **params): - """ - Online gradient descent with backpropagation for three layers (input, hidden, - and output layer) and AdaDelta for adapting the learning rate per parameter. - - References: - [1] Werbos, PJ. "Beyond Regression: New Tools for Prediction and Analysis - in the Behavioral Sciences." (1974). - [2] Zeiler, MD. "ADADELTA: An adaptive learning rate method." (2012). - [3] Vincent, P. "Extracting and Composing Robust Features with Denoising - Autoencoders." (2008). 
- """ - - # To be able to switch this native module on and off, require positive - # activation on the gen slot for its code to be run. - if node.get_slot('gen').activation > 0: - - import theano - import theano.tensor as T - - # get shared name prefix of nodes in input, hidden, and output layers - input_ = node.get_parameter('input_prefix') - hidden = node.get_parameter('hidden_prefix') - output = node.get_parameter('output_prefix') - - # get the name of the nodespace where the input lives - ns_input_name = node.get_parameter('input_nodespace') - - # get nodespace uids of nodes in input, hidden, and output layers - # assumption: if the input layer consists of sensor nodes, they have their - # own nodespace, all other nodes are in this node's nodespace - ns_input_uid = None - for ns in netapi.get_nodespaces(): - if ns.name == ns_input_name: - ns_input_uid = ns.uid - break - ns_hidden_uid = node.parent_nodespace - ns_output_uid = node.parent_nodespace - - # initialization - if not hasattr(node, 'initialized'): - - node.set_state('cumulative_error', 0) - - sparse = str(node.get_parameter('ae_type')) == "sparse" - # denoising = str(node.get_parameter('ae_type')) == "denoising" - tied_weights = str(node.get_parameter('tied_weights')) == "True" - - # group nodes - netapi.group_nodes_by_names(ns_input_uid, node_name_prefix=input_) - netapi.group_nodes_by_names(ns_hidden_uid, node_name_prefix=hidden) - netapi.group_nodes_by_names(ns_output_uid, node_name_prefix=output) - - # get activation values - a_i_array = netapi.get_activations(ns_input_uid, input_) - a_h_array = netapi.get_activations(ns_hidden_uid, hidden) - a_o_array = netapi.get_activations(ns_output_uid, output) - - node.set_parameter('error', 0.0) # store error values to observe how training develops - - len_input = len(a_i_array) - len_hidden = len(a_h_array) - len_output = len(a_o_array) - - if len_input == 0: - netapi.logger.warn("Node net has no input nodes whose names start with '%s'", input_) - 
node.set_parameter('ctr', 0) - return - elif len_hidden == 0: - netapi.logger.warn("Node net has no hidden nodes whose names start with '%s'.", hidden) - node.set_parameter('ctr', 0) - return - elif len_output == 0: - netapi.logger.warn("Node net has no output names whose names start with '%s'.", output) - node.set_parameter('ctr', 0) - return - else: - netapi.logger.info("Initializing theano-based autoencoder backprop with layout: %i -> %i -> %i", - len_input, len_hidden, len_output) - - # get parameter values from node net - b_h_array = netapi.get_thetas(ns_hidden_uid, hidden) - b_o_array = netapi.get_thetas(ns_output_uid, output) - w_hi_array = netapi.get_link_weights(ns_input_uid, input_, ns_hidden_uid, hidden) - w_oh_array = netapi.get_link_weights(ns_hidden_uid, hidden, ns_output_uid, output) - - # declare shared variables ( shared b/w theano and node nets ) - a_i = node.a_i = theano.shared(value=a_i_array.astype(T.config.floatX), name="a_i", borrow=False) - a_h = node.a_h = theano.shared(value=a_h_array.astype(T.config.floatX), name="a_h", borrow=False) - a_o = node.a_o = theano.shared(value=a_o_array.astype(T.config.floatX), name="a_o", borrow=False) - b_h = node.b_h = theano.shared(value=b_h_array.astype(T.config.floatX), name="b_h", borrow=False) - b_o = node.b_o = theano.shared(value=b_o_array.astype(T.config.floatX), name="b_o", borrow=False) - w_hi = node.w_hi = theano.shared(value=w_hi_array.astype(T.config.floatX), name="w_hi", borrow=False) - w_oh = node.w_oh = theano.shared(value=w_oh_array.astype(T.config.floatX), name="w_oh", borrow=False) - - # write initial parameter values to shared variables - node.b_h.set_value(b_h_array, borrow=True) - node.b_o.set_value(b_o_array, borrow=True) - node.w_hi.set_value(w_hi_array, borrow=True) - node.w_oh.set_value(w_oh_array, borrow=True) - - # initialize accumulation variables for AdaDelta, ie. 
mean square gradients and mean square deltas - ms_grad_b_o = node.ms_grad_b_o = theano.shared(value=np.zeros_like(b_o_array), name="ms_grad_b_o", borrow=True) - ms_grad_w_oh = node.ms_grad_w_oh = theano.shared(value=np.zeros_like(w_oh_array), name="ms_grad_w_oh", borrow=True) - ms_grad_b_h = node.ms_grad_b_h = theano.shared(value=np.zeros_like(b_h_array), name="ms_grad_b_h", borrow=True) - ms_grad_w_hi = node.ms_grad_w_hi = theano.shared(value=np.zeros_like(w_hi_array), name="ms_grad_w_hi", borrow=True) - - ms_delta_b_o = node.ms_delta_b_o = theano.shared(value=np.zeros_like(b_o_array), name="ms_delta_b_o", borrow=True) - ms_delta_w_oh = node.ms_delta_w_oh = theano.shared(value=np.zeros_like(w_oh_array), name="ms_delta_w_oh", borrow=True) - ms_delta_b_h = node.ms_delta_b_h = theano.shared(value=np.zeros_like(b_h_array), name="ms_delta_b_h", borrow=True) - ms_delta_w_hi = node.ms_delta_w_hi = theano.shared(value=np.zeros_like(w_hi_array), name="ms_delta_w_hi", borrow=True) - - # make function parameters theano compatible - weight_decay = T.scalar("weight_decay", dtype=T.config.floatX) - sparsity_value = T.scalar("sparsity_value", dtype=T.config.floatX) - sparsity_penalty = T.scalar("sparsity_penalty", dtype=T.config.floatX) - ada_rho = T.scalar("ada_rho", dtype=T.config.floatX) - ada_eps = T.scalar("ada_eps", dtype=T.config.floatX) - - # declare the reconstruction error - error_term = T.sum(T.square(a_o - a_i)) / 2. # squared error - # error_term = -T.sum(a_i * T.log(a_o) + (1. - a_i) * T.log(1. - a_o)) # cross-entropy - - # use a weight constraint as a regularizer - weight_constraint = (weight_decay / 2.) 
* (T.sum(T.square(w_hi)) + T.sum(T.square(w_oh))) - - if sparse: # training criterion for a sparse autoencoder - - # save the average activation of hidden units; initialize to first activation received - avg_a_h = node.avg_a_h = theano.shared(value=a_h_array, name="avg_a_h", borrow=False) - new_avg_a_h = 0.95 * avg_a_h + (1 - 0.95) * a_h # for gradient checking, set new_avg_a_h = a_h - - rho = sparsity_value - information_gain = rho * T.log(rho / new_avg_a_h) + (1. - rho) * T.log((1. - rho) / (1. - new_avg_a_h)) - - sparsity_constraint = sparsity_penalty * T.sum(information_gain) - cost = error_term + weight_constraint + sparsity_constraint - - else: # training criterion for a denoising autoencoder - - cost = error_term + weight_constraint - - node.cost = theano.function([weight_decay, sparsity_value, sparsity_penalty], cost, on_unused_input='ignore') - node.error = theano.function([], error_term / len(b_h_array)) - - # compute gradients - sigmoid_deriv_a_o = a_o * (1. - a_o) - grad_o = (a_o - a_i) * sigmoid_deriv_a_o # squared error # T.grad(cost, z_o) - # grad_o = ((a_i - a_o) / (a_o - a_o**2)) * sigmoid_deriv_a_o # cross-entropy - - sigmoid_deriv_a_h = a_h * (1. - a_h) - - if sparse: - - grad_w_oh = T.dot(T.reshape(grad_o, (len_input, 1)), T.reshape(a_h, (1, len_hidden))) + weight_decay * w_oh - grad_sparsity = (- rho / new_avg_a_h + (1. - rho) / (1. 
- new_avg_a_h)).T - grad_h = (T.dot(w_oh.T, grad_o) + sparsity_penalty * grad_sparsity) * sigmoid_deriv_a_h - grad_w_hi = T.dot(T.reshape(grad_h, (len_hidden, 1)), T.reshape(a_i, (1, len_input))) + weight_decay * w_hi - - else: # denoising - - grad_w_oh = T.dot(T.reshape(grad_o, (len_input, 1)), T.reshape(a_h, (1, len_hidden))) + weight_decay * w_oh - grad_h = T.dot(w_oh.T, grad_o) * sigmoid_deriv_a_h - grad_w_hi = T.dot(T.reshape(grad_h, (len_hidden, 1)), T.reshape(a_i, (1, len_input))) + weight_decay * w_hi - - if tied_weights: - grad_w_oh = grad_w_oh + grad_w_hi.T - gradients = [grad_o, grad_w_oh, grad_h] - ms_grad = [ms_grad_b_o, ms_grad_w_oh, ms_grad_b_h] - ms_delta = [ms_delta_b_o, ms_delta_w_oh, ms_delta_b_h] - else: - gradients = [grad_o, grad_w_oh, grad_h, grad_w_hi] - ms_grad = [ms_grad_b_o, ms_grad_w_oh, ms_grad_b_h, ms_grad_w_hi] - ms_delta = [ms_delta_b_o, ms_delta_w_oh, ms_delta_b_h, ms_delta_w_hi] - - # update accumulation variables for AdaDelta and compute new deltas - # compute an exponentially decaying average of squared gradients - # ie. recent gradients are more important and the quantity doesn't continue to grow - # thereby allowing the learning rate to grow or shrink as time progresses ( rather than just shrink as in AdaGrad ) - new_ms_grad = [ada_rho * ms_g + (1 - ada_rho) * (g**2) for ms_g, g in zip(ms_grad, gradients)] - # Note: the square root of the mean squared gradients plus epsilon is effectively the RMS of the gradients - # epsilon is added ~"to start off the first iteration and to ensure progress when previous updates become small" - deltas = [(T.sqrt(ms_d + ada_eps) / T.sqrt(ms_g + ada_eps)) * g for ms_d, ms_g, g in zip(ms_delta, new_ms_grad, gradients)] - # compute an exponentially decaying average of squared deltas -- this is to ensure correct units - new_ms_delta = [ada_rho * ms_d + (1 - ada_rho) * (d**2) for ms_d, d in zip(ms_delta, deltas)] - - # update parameters, ie. 
old_value - learning_rate * delta_value - if tied_weights: - new_b_o, new_w_oh, new_b_h = (old - update for old, update in zip([b_o, w_oh, b_h], deltas)) - new_w_hi = new_w_oh.T - new_ms_grad.append(new_ms_grad[1].T) - new_ms_delta.append(new_ms_delta[1].T) - gradients.append(gradients[1].T) - else: - new_b_o, new_w_oh, new_b_h, new_w_hi = (old - update for old, update in zip([b_o, w_oh, b_h, w_hi], deltas)) - - if sparse: - - update_function = theano.function([weight_decay, sparsity_value, sparsity_penalty, ada_rho, ada_eps], - None, - updates=[(b_o, new_b_o), - (w_oh, new_w_oh), - (b_h, new_b_h), - (w_hi, new_w_hi), - (avg_a_h, new_avg_a_h), - (ms_grad_b_o, new_ms_grad[0]), - (ms_grad_w_oh, new_ms_grad[1]), - (ms_grad_b_h, new_ms_grad[2]), - (ms_grad_w_hi, new_ms_grad[3]), - (ms_delta_b_o, new_ms_delta[0]), - (ms_delta_w_oh, new_ms_delta[1]), - (ms_delta_b_h, new_ms_delta[2]), - (ms_delta_w_hi, new_ms_delta[3])], - on_unused_input='ignore') - - else: # denoising - - update_function = theano.function([weight_decay, sparsity_value, sparsity_penalty, ada_rho, ada_eps], - None, - updates=[(b_o, new_b_o), - (w_oh, new_w_oh), - (b_h, new_b_h), - (w_hi, new_w_hi), - (ms_grad_b_o, new_ms_grad[0]), - (ms_grad_w_oh, new_ms_grad[1]), - (ms_grad_b_h, new_ms_grad[2]), - (ms_grad_w_hi, new_ms_grad[3]), - (ms_delta_b_o, new_ms_delta[0]), - (ms_delta_w_oh, new_ms_delta[1]), - (ms_delta_b_h, new_ms_delta[2]), - (ms_delta_w_hi, new_ms_delta[3])], - on_unused_input='ignore') - - node.get_updated_parameters = update_function - - # for gradient checking use the following function: - node.get_gradients = theano.function([weight_decay, sparsity_value, sparsity_penalty, ada_rho, ada_eps], - [gradients[0], gradients[1], gradients[2], gradients[3]], on_unused_input='ignore') - - node.initialized = True - - # get input activations from node net - a_i_array = netapi.get_activations(ns_input_uid, input_) - - # learn only if activation on the input layer has been persistent for as many steps 
as your neural net has layers - # Note: since we're currently using denoising autoencoders, this means persistent up to Bernoulli noise - try: - # check if activation has changed since the last step ( by testing if there's any different activation value ) - bool_idx = node.prev_a_i != a_i_array - input_changed = np.any(bool_idx) - - # if deviating activations were 0 ( i.e most likely the effect of Bernoulli noising ), assume no change - is_zero = node.prev_a_i[bool_idx] == 0 - # if is_zero contains elements but not all input activations and their values are all zero, assume no change - if len(is_zero) and len(is_zero) < len(a_i_array) and np.all(is_zero): - input_changed = False - except: - input_changed = True - - node.prev_a_i = a_i_array - - if input_changed: - node.set_parameter('ctr', 1) - else: - node.set_parameter('ctr', int(node.get_parameter('ctr')) + 1) - - # until counter equals number of layers, ie. the same activation has reached all layers, don't compute - if node.get_parameter('ctr') < 3: - return - - # get other activations from node net - a_h_array = netapi.get_activations(ns_hidden_uid, hidden) - a_o_array = netapi.get_activations(ns_output_uid, output) - - # define learning parameters - param = node.get_parameter('weight_decay') - if param is None: - weight_decay = netapi.floatX(4e-06) # 0.0001 . 1e-07 assuming batches of size 1000 . 4e-06 assuming batches of size 256 - node.set_parameter('weight_decay', str(weight_decay)) # store as regular float to appease the serializer - else: - weight_decay = netapi.floatX(param) - - param = node.get_parameter('sparsity_value') - if param is None: - sparsity_value = netapi.floatX(0.05) - node.set_parameter('sparsity_value', str(sparsity_value)) - else: - sparsity_value = netapi.floatX(param) - - param = node.get_parameter('sparsity_penalty') - if param is None: - sparsity_penalty = netapi.floatX(0.001) # 3.0 . 0.003 assuming batches of size 1000 . 
0.01 assuming batches of size 256 - node.set_parameter('sparsity_penalty', str(sparsity_penalty)) - else: - sparsity_penalty = netapi.floatX(param) - - param = node.get_parameter('adadelta_rho') - if param is None: - ada_rho = netapi.floatX(0.95) - node.set_parameter('adadelta_rho', str(ada_rho)) - else: - ada_rho = netapi.floatX(param) - - param = node.get_parameter('adadelta_eps') - if param is None: - ada_eps = netapi.floatX(1e-6) - node.set_parameter('adadelta_eps', str(ada_eps)) - else: - ada_eps = netapi.floatX(param) - - param = node.get_parameter('ae_type') - if param is None: - ae_type = 'sparse' # options: 'sparse', 'denoising' - node.set_parameter('ae_type', 'sparse') - else: - ae_type = str(param) - - param = node.get_parameter('t') - if param is None: - t = 0 - node.set_parameter('t', t) - else: - t = int(param) - - # gradient checking - # Note: use double precision when running gradient checks - if node.get_parameter('check_grad') == 'yes': - - # get values of biases and weights from node net - b_h_array = netapi.get_thetas(ns_hidden_uid, hidden) - b_o_array = netapi.get_thetas(ns_output_uid, output) - w_hi_array = netapi.get_link_weights(ns_input_uid, input_, ns_hidden_uid, hidden) - w_oh_array = netapi.get_link_weights(ns_hidden_uid, hidden, ns_output_uid, output) - - # compute the analytical gradient - anal_grad = compute_analytic_gradient( - netapi, node, a_i_array, a_h_array, a_o_array, b_h_array, b_o_array, w_hi_array, w_oh_array, - weight_decay, sparsity_value, sparsity_penalty, ada_rho, ada_eps) - - # compute the numerical gradient - num_grad = compute_numeric_gradient( - netapi, node, a_i_array, a_h_array, a_o_array, b_h_array, b_o_array, w_hi_array, w_oh_array, - weight_decay, sparsity_value, sparsity_penalty) - - # compare them - diff = np.linalg.norm(num_grad - anal_grad) / np.linalg.norm(num_grad + anal_grad) - print("Gradient difference: %e" % diff) # %.10f" % diff - print("The norm of the difference between numerical and analytical 
gradient should be < 1e-9\n") - - # write values to shared variables - node.a_i.set_value(a_i_array, borrow=True) - node.a_h.set_value(a_h_array, borrow=True) - node.a_o.set_value(a_o_array, borrow=True) - - # update values in shared variables ( using backpropgation of the gradients ) - node.get_updated_parameters(weight_decay, sparsity_value, sparsity_penalty, ada_rho, ada_eps) - - # write new parameter values to node net - netapi.set_thetas(ns_output_uid, output, node.b_o.get_value(borrow=True)) - netapi.set_link_weights(ns_hidden_uid, hidden, ns_output_uid, output, node.w_oh.get_value(borrow=True)) - netapi.set_thetas(ns_hidden_uid, hidden, node.b_h.get_value(borrow=True)) - netapi.set_link_weights(ns_input_uid, input_, ns_hidden_uid, hidden, node.w_hi.get_value(borrow=True)) - - error = float(node.error()) - # save current error as node parameter - node.set_parameter('error', error) - node.set_state('cumulative_error', node.get_state('cumulative_error') + error) - - t = int(node.get_parameter('t')) - if t % 1000 == 0: - netapi.logger.debug("Number of backprop steps computed %d" % t) - netapi.logger.debug("Average Error %.6f (Latest: 0=%.6f)" % ((node.get_state('cumulative_error') / 1000), error)) - node.set_state('cumulative_error', 0.0) - - # reset counter after successful backprop step; cf. must wait for new sensory activation to reach output layer - node.set_parameter('ctr', 0) - node.set_parameter('t', t + 1) - - -def sigmoid(z): - """ The sigmoid ( activation ) function. """ - return 1. / (1. + np.exp(-z)) - - -def compute_analytic_gradient(netapi, node, a_i, a_h, a_o, b_h, b_o, w_hi, w_oh, weight_decay, - sparsity_value, sparsity_penalty, ada_rho, ada_eps): - - # make sure borrow is False here because otherwise the buffers are overwritten and - # compute_numerical_gradient(..) 
still needs these same input values for proper comparison - node.a_i.set_value(a_i, borrow=False) - node.a_h.set_value(a_h, borrow=False) - node.a_o.set_value(a_o, borrow=False) - node.b_h.set_value(b_h, borrow=False) - node.b_o.set_value(b_o, borrow=False) - node.w_hi.set_value(w_hi, borrow=False) - node.w_oh.set_value(w_oh, borrow=False) - - delta_o, delta_w_oh, delta_h, delta_w_hi = \ - node.get_gradients(weight_decay, sparsity_value, sparsity_penalty, ada_rho, ada_eps) - - gradient = np.concatenate((delta_o, np.ravel(delta_w_oh), delta_h, np.ravel(delta_w_hi))) - - return gradient - - -def compute_numeric_gradient(netapi, node, a_i, a_h, a_o, b_h, b_o, w_hi, w_oh, weight_decay, sparsity_value, sparsity_penalty): - """ Compute numerical gradient for validating backprop implementation above. """ - - from copy import deepcopy - - # helper variables - epsilon = netapi.floatX(1e-4) - ni = len(b_o) - nh = len(b_h) - nih = ni * nh - - theta = np.concatenate((b_o, np.ravel(w_oh), b_h, np.ravel(w_hi))) - - n = theta.shape[0] - I = np.eye(n, dtype=netapi.floatX) - gradient = np.zeros(theta.shape, dtype=netapi.floatX) - - for i in range(n): - - eps_vec = np.array(I[:, i] * epsilon, dtype=netapi.floatX) - eps_plus = theta + eps_vec - eps_minus = theta - eps_vec - - # split theta into parts, recompute activations, update shared variables, compute cost - b_o_plus = eps_plus[: ni] - w_oh_plus = eps_plus[ni: ni + nih].reshape((ni, nh)) - b_h_plus = eps_plus[ni + nih: ni + nih + nh] - w_hi_plus = eps_plus[ni + nih + nh:].reshape((nh, ni)) - a_i_plus = deepcopy(a_i) - a_h_plus = np.ravel(sigmoid(w_hi_plus.dot(a_i_plus) + b_h_plus)) - a_o_plus = np.ravel(sigmoid(w_oh_plus.dot(a_h_plus) + b_o_plus)) - - node.a_i.set_value(a_i_plus, borrow=True) - node.a_h.set_value(a_h_plus, borrow=True) - node.a_o.set_value(a_o_plus, borrow=True) - node.b_h.set_value(b_h_plus, borrow=True) - node.b_o.set_value(b_o_plus, borrow=True) - node.w_hi.set_value(w_hi_plus, borrow=True) - 
node.w_oh.set_value(w_oh_plus, borrow=True) - - cost = node.cost(weight_decay, sparsity_value, sparsity_penalty) - - # split theta into parts, recompute activations, update shared variables, compute cost - b_o_minus = eps_minus[: ni] - w_oh_minus = eps_minus[ni: ni + nih].reshape((ni, nh)) - b_h_minus = eps_minus[ni + nih: ni + nih + nh] - w_hi_minus = eps_minus[ni + nih + nh:].reshape((nh, ni)) - a_i_minus = deepcopy(a_i) - a_h_minus = np.ravel(sigmoid(w_hi_minus.dot(a_i_minus) + b_h_minus)) - a_o_minus = np.ravel(sigmoid(w_oh_minus.dot(a_h_minus) + b_o_minus)) - - node.a_i.set_value(a_i_minus, borrow=True) - node.a_h.set_value(a_h_minus, borrow=True) - node.a_o.set_value(a_o_minus, borrow=True) - node.b_h.set_value(b_h_minus, borrow=True) - node.b_o.set_value(b_o_minus, borrow=True) - node.w_hi.set_value(w_hi_minus, borrow=True) - node.w_oh.set_value(w_oh_minus, borrow=True) - - cost_ = node.cost(weight_decay, sparsity_value, sparsity_penalty) - - # compute cost difference - gradient[i] = (cost - cost_) / (2. * epsilon) - - if i % 1000 == 0: - print("Computed numeric gradient for %d parameters" % i) - - return gradient diff --git a/micropsi_core/nodenet/netapi.py b/micropsi_core/nodenet/netapi.py index a35c6e52..b78cac8f 100644 --- a/micropsi_core/nodenet/netapi.py +++ b/micropsi_core/nodenet/netapi.py @@ -1,14 +1,7 @@ -try: - from . 
import vizapi -except ImportError: - vizapi = None - class NetAPI(object): - """ - Node Net API facade class for use from within the node net (in node functions) - """ + # Node Net API facade class for use from within the node net (in node functions) @property def uid(self): @@ -16,20 +9,19 @@ def uid(self): @property def step(self): + """ The current step of the nodenet """ return self.__nodenet.current_step @property def worldadapter(self): + """ The worldadapter instance """ return self.__nodenet.worldadapter_instance @property def logger(self): + """ The nodenet logger """ return self.__nodenet.logger - @property - def vizapi(self): - return vizapi - def __init__(self, nodenet): self.__nodenet = nodenet @@ -52,7 +44,7 @@ def get_node(self, uid): """ return self.__nodenet.get_node(uid) - def get_nodes(self, nodespace=None, node_name_prefix=None, nodetype=None, sortby='id'): + def get_nodes(self, nodespace=None, node_name_prefix=None, nodetype=None, sortby='ids'): """ Returns a list of nodes in the given nodespace (all Nodespaces if None) whose names start with the given prefix (all if None) @@ -76,6 +68,8 @@ def get_nodes(self, nodespace=None, node_name_prefix=None, nodetype=None, sortby nodes = sorted(nodes, key=lambda node: node.uid) elif sortby == 'names': nodes = sorted(nodes, key=lambda node: node.name) + else: + raise ValueError("Unknown sortby value %s" % sortby) return nodes @@ -127,7 +121,7 @@ def get_nodes_in_slot_field(self, node, slot=None, no_links_to=None, nodespace=N nodes.append(link.source_node) return nodes - def get_nodes_active(self, nodespace, type=None, min_activation=1, gate=None, sheaf='default'): + def get_nodes_active(self, nodespace, type=None, min_activation=1, gate=None): """ Returns all nodes with a min activation, of the given type, active at the given gate, or with node.activation """ @@ -136,10 +130,10 @@ def get_nodes_active(self, nodespace, type=None, min_activation=1, gate=None, sh if type is None or node.type == type: if gate 
is not None: if gate in node.get_gate_types(): - if node.get_gate(gate).activations[sheaf] >= min_activation: + if node.get_gate(gate).activation >= min_activation: nodes.append(node) else: - if node.activations[sheaf] >= min_activation: + if node.activation >= min_activation: nodes.append(node) return nodes @@ -155,7 +149,7 @@ def delete_nodespace(self, nodespace): """ self.__nodenet.delete_nodespace(nodespace.uid) - def create_node(self, nodetype, nodespace, name=None): + def create_node(self, nodetype, nodespace=None, name=None, **parameters): """ Creates a new node or node space of the given type, with the given name and in the given nodespace. Returns the newly created entity. @@ -164,7 +158,7 @@ def create_node(self, nodetype, nodespace, name=None): name = "" # TODO: empty names crash the client right now, but really shouldn't pos = [100, 100, 0] # default so native modules will not be bothered with positions - uid = self.__nodenet.create_node(nodetype, nodespace, pos, name) + uid = self.__nodenet.create_node(nodetype, nodespace, pos, name, parameters=parameters) entity = self.__nodenet.get_node(uid) return entity @@ -173,24 +167,23 @@ def create_nodespace(self, parent_nodespace, name=None, options=None): Create a new nodespace with the given name in the given parent_nodespace Options: new_partition - Whether or not to create a seperate partition for this nodespace - Attention: Experimental Feature, Sensors & Actors only work in the root-partition + Attention: Experimental Feature, Sensors & Actuators only work in the root-partition """ if name is None: name = "" # TODO: empty names crash the client right now, but really shouldn't - pos = [100, 100, 0] # default so native modules will not be bothered with positions - uid = self.__nodenet.create_nodespace(parent_nodespace, pos, name=name, options=options) + uid = self.__nodenet.create_nodespace(parent_nodespace, name=name, options=options) entity = self.__nodenet.get_nodespace(uid) return entity - def 
link(self, source_node, source_gate, target_node, target_slot, weight=1, certainty=1): + def link(self, source_node, source_gate, target_node, target_slot, weight=1): """ Creates a link between two nodes. If the link already exists, it will be updated - with the given weight and certainty values (or the default 1 if not given) + with the given weight (or the default 1 if not given) """ - self.__nodenet.create_link(source_node.uid, source_gate, target_node.uid, target_slot, weight, certainty) + self.__nodenet.create_link(source_node.uid, source_gate, target_node.uid, target_slot, weight) - def link_with_reciprocal(self, source_node, target_node, linktype, weight=1, certainty=1): + def link_with_reciprocal(self, source_node, target_node, linktype, weight=1): """ Creates two (reciprocal) links between two nodes, valid linktypes are subsur, porret, catexp and symref """ @@ -199,23 +192,23 @@ def link_with_reciprocal(self, source_node, target_node, linktype, weight=1, cer if linktype == "subsur": subslot = "sub" if "sub" in target_slot_types else "gen" surslot = "sur" if "sur" in source_slot_types else "gen" - self.__nodenet.create_link(source_node.uid, "sub", target_node.uid, subslot, weight, certainty) - self.__nodenet.create_link(target_node.uid, "sur", source_node.uid, surslot, weight, certainty) + self.__nodenet.create_link(source_node.uid, "sub", target_node.uid, subslot, weight) + self.__nodenet.create_link(target_node.uid, "sur", source_node.uid, surslot, weight) elif linktype == "porret": porslot = "por" if "por" in target_slot_types else "gen" retslot = "ret" if "ret" in source_slot_types else "gen" - self.__nodenet.create_link(source_node.uid, "por", target_node.uid, porslot, weight, certainty) - self.__nodenet.create_link(target_node.uid, "ret", source_node.uid, retslot, weight, certainty) + self.__nodenet.create_link(source_node.uid, "por", target_node.uid, porslot, weight) + self.__nodenet.create_link(target_node.uid, "ret", source_node.uid, retslot, 
weight) elif linktype == "catexp": catslot = "cat" if "cat" in target_slot_types else "gen" expslot = "exp" if "exp" in source_slot_types else "gen" - self.__nodenet.create_link(source_node.uid, "cat", target_node.uid, catslot, weight, certainty) - self.__nodenet.create_link(target_node.uid, "exp", source_node.uid, expslot, weight, certainty) + self.__nodenet.create_link(source_node.uid, "cat", target_node.uid, catslot, weight) + self.__nodenet.create_link(target_node.uid, "exp", source_node.uid, expslot, weight) elif linktype == "symref": symslot = "sym" if "sym" in target_slot_types else "gen" refslot = "ref" if "ref" in source_slot_types else "gen" - self.__nodenet.create_link(source_node.uid, "sym", target_node.uid, symslot, weight, certainty) - self.__nodenet.create_link(target_node.uid, "ref", source_node.uid, refslot, weight, certainty) + self.__nodenet.create_link(source_node.uid, "sym", target_node.uid, symslot, weight) + self.__nodenet.create_link(target_node.uid, "ref", source_node.uid, refslot, weight) def unlink(self, source_node, source_gate=None, target_node=None, target_slot=None): """ @@ -239,38 +232,21 @@ def unlink_slot(self, node, slot_name, source_node_uid=None, source_gate_name=No if source_gate_name is None or l.source_gate.type == source_gate_name: l.source_node.unlink(l.source_gate.type, target_node_uid=node.uid, slot_name=slot_name) - def unlink_direction(self, node, gateslot=None): + def link_actuator(self, node, datatarget, weight=1, gate='sub', slot='sur'): """ - Deletes all links from a node ending at the given slot or originating at the given gate - Read this as 'delete all por linkage from this node' + Links a node to an actuator. If no actuator exists in the node's nodespace for the given datatarget, + a new actuator will be created, otherwise the first actuator found will be used """ - self.logger.warn("unlink direction is deprecated. 
use unlink_gate and unlink_slot") - node.unlink(gateslot) - - links_to_delete = set() - for slottype in node.get_slot_types(): - if gateslot is None or gateslot == slottype: - for link in node.get_slot(slottype).get_links(): - links_to_delete.add(link) - - for link in links_to_delete: - link.source_node.unlink(target_node_uid=node.uid, slot_name=gateslot) - - def link_actor(self, node, datatarget, weight=1, certainty=1, gate='sub', slot='sur'): - """ - Links a node to an actor. If no actor exists in the node's nodespace for the given datatarget, - a new actor will be created, otherwise the first actor found will be used - """ - actor = None - for uid, candidate in self.__nodenet.get_actors(node.parent_nodespace).items(): + actuator = None + for uid, candidate in self.__nodenet.get_actuators(node.parent_nodespace).items(): if candidate.get_parameter('datatarget') == datatarget: - actor = candidate - if actor is None: - actor = self.create_node("Actor", node.parent_nodespace, datatarget) - actor.set_parameter('datatarget', datatarget) + actuator = candidate + if actuator is None: + actuator = self.create_node("Actuator", node.parent_nodespace, datatarget) + actuator.set_parameter('datatarget', datatarget) - self.link(node, gate, actor, 'gen', weight, certainty) - # self.link(actor, 'gen', node, slot) + self.link(node, gate, actuator, 'gen', weight) + # self.link(actuator, 'gen', node, slot) def link_sensor(self, node, datasource, slot='sur', weight=1): """ @@ -287,29 +263,28 @@ def link_sensor(self, node, datasource, slot='sur', weight=1): self.link(sensor, 'gen', node, slot, weight) - def import_actors(self, nodespace, datatarget_prefix=None): + def import_actuators(self, nodespace, datatarget_prefix=None): """ - Makes sure an actor for all datatargets whose names start with the given prefix, or all datatargets, + Makes sure an actuator for all datatargets whose names start with the given prefix, or all datatargets, exists in the given nodespace. 
""" - all_actors = [] + all_actuators = [] if self.worldadapter is None: - return all_actors + return all_actuators datatargets = self.worldadapter.get_available_datatargets() - datatargets = sorted(datatargets) for datatarget in datatargets: if datatarget_prefix is None or datatarget.startswith(datatarget_prefix): - actor = None - for uid, candidate in self.__nodenet.get_actors(nodespace, datatarget).items(): - actor = candidate + actuator = None + for uid, candidate in self.__nodenet.get_actuators(nodespace, datatarget).items(): + actuator = candidate break - if actor is None: - actor = self.create_node("Actor", nodespace, datatarget) - actor.set_parameter('datatarget', datatarget) - all_actors.append(actor) - return all_actors + if actuator is None: + actuator = self.create_node("Actuator", nodespace, datatarget) + actuator.set_parameter('datatarget', datatarget) + all_actuators.append(actuator) + return all_actuators def import_sensors(self, nodespace, datasource_prefix=None): """ @@ -321,7 +296,6 @@ def import_sensors(self, nodespace, datasource_prefix=None): return all_sensors datasources = self.worldadapter.get_available_datasources() - datasources = sorted(datasources) for datasource in datasources: if datasource_prefix is None or datasource.startswith(datasource_prefix): @@ -335,17 +309,6 @@ def import_sensors(self, nodespace, datasource_prefix=None): all_sensors.append(sensor) return all_sensors - def set_gatefunction(self, nodespace, nodetype, gatetype, gatefunction): - """Sets the gatefunction for gates of type gatetype of nodes of type nodetype, in the given - nodespace. - The gatefunction needs to be given as a string. 
- """ - nodespace = self.get_nodespace(nodespace) - for uid in nodespace.get_known_ids(entitytype="nodes"): - node = self.get_node(uid) - if node.type == nodetype: - node.set_gatefunction_name(gatetype, gatefunction) - def notify_user(self, node, msg): """ Stops the nodenetrunner for this nodenet, and displays an information to the user, @@ -354,41 +317,11 @@ def notify_user(self, node, msg): node: the node object that emits this message msg: a string to display to the user """ - self.__nodenet.user_prompt = { - 'node': node.get_data(include_links=False), - 'msg': msg, - 'options': None - } - self.__nodenet.is_active = False + self.__nodenet.set_user_prompt(node, None, msg, []) - def ask_user_for_parameter(self, node, msg, options): - """ - Stops the nodenetrunner for this nodenet, and asks the user for values to the given parameters. - These parameters will be passed into the nodefunction in the next step of the nodenet. - The user can choose to either continue or suspend running the nodenet - Parameters: - node: the node object that emits this message - msg: a string to display to the user - options: an array of objects representing the variables to set by the user. Needs key, label. 
Optional: type (textarea or text), values: an array or object of possible values - - example usage: - options = [{ - 'key': 'where', - 'label': 'Where should I go next?', - 'values': {'north': 'North', 'east': 'East', 'south': 'South', 'west': 'west'} - }, { - 'key': 'wait': - 'label': 'How long should I wait until I go there?', - 'type': 'textarea' - }] - netapi.ask_user_for_parameter(node, "Please decide what to do next", options) - """ - self.__nodenet.user_prompt = { - 'node': node.get_data(), - 'msg': msg, - 'options': options - } - self.__nodenet.is_active = False + def show_user_prompt(self, node, key): + promptinfo = node.get_user_prompt(key) + self.__nodenet.set_user_prompt(node, key, promptinfo['callback'].__doc__, promptinfo['parameters']) def autoalign_nodespace(self, nodespace): """ Calls the autoalignment on the given nodespace """ @@ -428,7 +361,7 @@ def copy_nodes(self, nodes, nodespace_uid): uids = [node.uid for node in nodes] uidmap = {} for node in nodes: - new_uid = self.__nodenet.create_node(node.type, nodespace_uid, node.position, name=node.name, parameters=node.clone_parameters(), gate_parameters=node.get_gate_parameters()) + new_uid = self.__nodenet.create_node(node.type, nodespace_uid, node.position, name=node.name, parameters=node.clone_parameters(), gate_configuration=node.get_gate_configuration()) uidmap[node.uid] = new_uid for node in nodes: for g in node.get_gate_types(): @@ -438,8 +371,7 @@ def copy_nodes(self, nodes, nodespace_uid): l.source_gate.type, self.get_node(uidmap[l.target_node.uid]), l.target_slot.type, - weight=l.weight, - certainty=l.certainty) + weight=l.weight) mapping = {} for node in nodes: mapping[node] = self.get_node(uidmap[node.uid]) @@ -483,19 +415,24 @@ def substitute_activations(self, nodespace_uid, group, new_activations): """ return self.__nodenet.set_activations(nodespace_uid, group, new_activations) - def get_thetas(self, nodespace_uid, group): + def get_gate_configurations(self, nodespace_uid, group, 
gatefunction_parameter=None): """ - Returns an array of theta values for the given group. - For multi-gate nodes, the thetas of the gen gates will be returned + Returns a dictionary containing a list of gatefunction names, and a list of the values + of the given gatefunction_parameter (if given) """ - return self.__nodenet.get_thetas(nodespace_uid, group) + return self.__nodenet.get_gate_configurations(nodespace_uid, group, gatefunction_parameter) - def set_thetas(self, nodespace_uid, group, new_thetas): + def set_gate_configurations(self, nodespace_uid, group, gatefunction, gatefunction_parameter=None, parameter_values=None): """ - Bulk-sets thetas for the given group. - new_thetas dimensionality has to match the group length + Bulk-sets gatefunctions and a gatefunction_parameter for the given group. + Arguments: + nodespace_uid (string) - id of the parent nodespace + group (string) - name of the group + gatefunction (string) - name of the gatefunction to set + gatefunction_parameter (optional) - name of the gatefunction_parameter to set + parameter_values (optional) - values to set for the gatefunction_parameter """ - self.__nodenet.set_thetas(nodespace_uid, group, new_thetas) + self.__nodenet.set_gate_configurations(nodespace_uid, group, gatefunction, gatefunction_parameter, parameter_values) def get_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, group_to): """ @@ -519,23 +456,22 @@ def get_node_ids(self, nodespace_uid, group): """ return self.__nodenet.get_node_uids(nodespace_uid, group) - def add_gate_monitor(self, node_uid, gate, sheaf=None, name=None, color=None): + def add_gate_monitor(self, node_uid, gate, name=None, color=None): """Adds a continuous monitor to the activation of a gate. The monitor will collect the activation value in every calculation step. 
Returns the uid of the new monitor.""" - return self.__nodenet.add_gate_monitor(node_uid, gate, sheaf=sheaf, name=name, color=color) + return self.__nodenet.add_gate_monitor(node_uid, gate, name=name, color=color) - def add_slot_monitor(self, node_uid, slot, sheaf=None, name=None, color=None): + def add_slot_monitor(self, node_uid, slot, name=None, color=None): """Adds a continuous monitor to the activation of a slot. The monitor will collect the activation value in every calculation step. Returns the uid of the new monitor.""" - return self.__nodenet.add_slot_monitor(node_uid, slot, sheaf=sheaf, name=name, color=color) + return self.__nodenet.add_slot_monitor(node_uid, slot, name=name, color=color) - def add_link_monitor(self, source_node_uid, gate_type, target_node_uid, slot_type, property=None, name=None, color=None): - """Adds a continuous monitor to a link. You can choose to monitor either weight (default) or certainty - The monitor will collect respective value in every calculation step. + def add_link_monitor(self, source_node_uid, gate_type, target_node_uid, slot_type, name=None, color=None): + """Adds a continuous weightmonitor to a link. The monitor will record the linkweight in every calculation step. Returns the uid of the new monitor.""" - return self.__nodenet.add_link_monitor(source_node_uid, gate_type, target_node_uid, slot_type, property=property, name=name, color=color) + return self.__nodenet.add_link_monitor(source_node_uid, gate_type, target_node_uid, slot_type, name=name, color=color) def add_modulator_monitor(self, modulator, name, color=None): """Adds a continuous monitor to a global modulator. 
@@ -549,6 +485,15 @@ def add_custom_monitor(self, function, name, color=None): Returns the uid of the new monitor.""" return self.__nodenet.add_custom_monitor(function, name, color=color) + def add_adhoc_monitor(self, function, name, parameters={}): + return self.__nodenet.add_adhoc_monitor(function, name, parameters) + + def add_group_monitor(self, nodespace, name, node_name_prefix='', node_uids=[], gate='gen', color=None): + """Adds a continuous monitor, that tracks the activations of the given group + return-value for every calculation step. + Returns the uid of the new monitor.""" + return self.__nodenet.add_group_monitor(nodespace, name, node_name_prefix=node_name_prefix, node_uids=node_uids, gate=gate, color=color) + def get_monitor(self, uid): """Returns the monitor with the given uid""" return self.__nodenet.get_monitor(uid) diff --git a/micropsi_core/nodenet/node.py b/micropsi_core/nodenet/node.py index 793706d9..322bcbc3 100644 --- a/micropsi_core/nodenet/node.py +++ b/micropsi_core/nodenet/node.py @@ -19,9 +19,6 @@ __date__ = '09.05.12' -emptySheafElement = dict(uid="default", name="default", activation=0) - - class Node(metaclass=ABCMeta): """ Abstract base class for node implementations. 
@@ -105,23 +102,7 @@ def parent_nodespace(self, uid): @abstractmethod def activation(self): """ - This node's activation property ('default' sheaf) as calculated once per step by its node function - """ - pass # pragma: no cover - - # @property - # @abstractmethod - # def activations(self): - # """ - # This node's activation properties (dict of all sheaves) as calculated once per step by its node function - # """ - - @property - @abstractmethod - def activations(self): - """ - Returns a copy of the nodes's activations (all sheaves) - Changes to the returned dict will not affect the node + This node's activation property as calculated once per step by its node function """ pass # pragma: no cover @@ -129,7 +110,7 @@ def activations(self): @abstractmethod def activation(self, activation): """ - Sets this node's activation property ('default' sheaf), overriding what has been calculated by the node function + Sets this node's activation property, overriding what has been calculated by the node function """ pass # pragma: no cover @@ -147,15 +128,19 @@ def nodetype(self): """ return self._nodetype - def __init__(self, nodetype_name, nodetype): + def __init__(self, nodenet, nodetype_name, nodetype): """ Constructor needs the string name of this node's type, and a Nodetype instance """ + self._nodenet = nodenet self._nodetype_name = nodetype_name self._nodetype = nodetype self.logger = nodetype.logger def get_data(self, complete=False, include_links=True): + """ + Return this node's json data for the frontend + """ data = { "name": self.name, "position": self.position, @@ -163,11 +148,9 @@ def get_data(self, complete=False, include_links=True): "type": self.type, "parameters": self.clone_parameters(), "state": self.clone_state(), - "gate_parameters": self.clone_non_default_gate_parameters(), - "sheaves": self.clone_sheaves(), "activation": self.activation, "gate_activations": self.construct_gates_dict(), - "gate_functions": self.get_gatefunction_names() + 
"gate_configuration": self.get_gate_configuration() } data["uid"] = self.uid if complete: @@ -177,6 +160,9 @@ def get_data(self, complete=False, include_links=True): return data def construct_links_dict(self): + """ + Return a dict of links originating at this node + """ links = {} for key in self.get_gate_types(): gatelinks = self.get_gate(key).get_links() @@ -184,6 +170,11 @@ def construct_links_dict(self): links[key] = [l.get_data() for l in gatelinks] return links + def get_user_prompt(self, key): + if key not in self._nodetype.user_prompts: + raise KeyError("Nodetype %s does not define a user_prompt named %s" % (self._nodetype.name, key)) + return self._nodetype.user_prompts[key] + @abstractmethod def get_gate(self, type): """ @@ -192,40 +183,20 @@ def get_gate(self, type): pass # pragma: no cover @abstractmethod - def set_gate_parameter(self, gate_type, parameter, value): + def set_gate_configuration(self, gate_type, gatefunction, gatefunction_parameters={}): """ - Sets the given gate parameter to the given value + Configures the given gate to use the gatefunction of the given name, with the given parameters + if gatefunction_name is None, the default "identity" gatefunction is set for the gate """ - pass # pragma: no cover - - @abstractmethod - def clone_non_default_gate_parameters(self, gate_type): - """ - Returns a copy of all gate parameters set to a non-default value. - Write access to this dict will not affect the node. 
- """ - pass # pragma: no cover - - @abstractmethod - def set_gatefunction_name(self, gate_type, gatefunction_name): - """ - sets the gatefunction of the given gate to the one with the given name - """ - pass # pragma: no cover - - @abstractmethod - def get_gatefunction_name(self, gate_type): - """ - returns the name of the gatefunction configured for this gate - """ - pass # pragma: no cover + pass @abstractmethod - def get_gatefunction_names(self): + def get_gate_configuration(self, gate_type=None): """ - Returns a map of gates and their gatefunctions + Returns a dict specifying the gatefunction and parameters configured for the given gate, + or all gates if None """ - pass # pragma: no cover + pass @abstractmethod def get_slot(self, type): @@ -303,15 +274,6 @@ def clone_state(self): """ pass # pragma: no cover - @abstractmethod - def clone_sheaves(self): - """ - Returns a copy of the activation values present in the node. - Note that this is about node activation, not gate activation (gates have their own sheaves). - Write access to this dict will not affect the node. 
- """ - pass # pragma: no cover - @abstractmethod def node_function(self): """ @@ -356,6 +318,9 @@ def get_slot_types(self): return list(self.nodetype.slottypes) def get_associated_links(self): + """ + Return a list of all links originating or terminating at this node + """ links = [] for key in self.get_gate_types(): links.extend(self.get_gate(key).get_links()) @@ -364,6 +329,9 @@ def get_associated_links(self): return links def get_associated_node_uids(self): + """ + Return a list of all node_uids that are linked to this node + """ nodes = [] for link in self.get_associated_links(): if link.source_node.uid != self.uid: @@ -373,11 +341,27 @@ def get_associated_node_uids(self): return nodes def construct_gates_dict(self): + """ + Return a dict mapping gate-names to gate-activations + """ data = {} for gate_name in self.get_gate_types(): - data[gate_name] = self.get_gate(gate_name).clone_sheaves() + data[gate_name] = self.get_gate(gate_name).activation return data + def show_plot(self, figure=None): + try: + from matplotlib import pyplot as plt + if figure is None: + figure = plt.gca().figure + plt.show() + self._nodenet.register_figure(self.uid, figure) + except ImportError: + self.logger.error("Matplotlib is needed for plotting") + + def close_figures(self): + self._nodenet.close_figures(node_uid=self.uid) + def __repr__(self): return "<%s \"%s\" (%s)>" % (self.nodetype.name, self.name, self.uid) @@ -416,16 +400,7 @@ def empty(self): @abstractmethod def activation(self): """ - Returns the gate's activation ('default' sheaf) - """ - pass # pragma: no cover - - @property - @abstractmethod - def activations(self): - """ - Returns a copy of the gate's activations (all sheaves) - Changes to the returned dict will not affect the gate + Returns the gate's activation """ pass # pragma: no cover @@ -437,23 +412,7 @@ def get_links(self): pass # pragma: no cover @abstractmethod - def get_parameter(self, parameter): - """ - Returns the value of the given parameter or none if 
the parameter is not set. - Note that the returned value may be a default inherited from gate parameter defaults as defined in Nodetype - """ - pass # pragma: no cover - - @abstractmethod - def clone_sheaves(self): - """ - Returns a copy of the activation values present in the gate. - Write access to this dict will not affect the gate. - """ - pass # pragma: no cover - - @abstractmethod - def gate_function(self, input_activation, sheaf="default"): + def gate_function(self, input_activation): """ This function sets the activation of the gate. This only needs to be implemented if the reference implementation for the node functions from @@ -470,18 +429,6 @@ def gate_function(self, input_activation, sheaf="default"): """ pass # pragma: no cover - @abstractmethod - def open_sheaf(self, input_activation, sheaf="default"): - """ - This function opens a new sheaf and calls gate_function function for the newly opened sheaf. - This only needs to be implemented if the reference implementation for the node functions from - nodefunctions.py is being used. - - Alternative implementations are free to handle sheaves in the node functions directly and - can pass on the implementation of this method. - """ - pass # pragma: no cover - def __repr__(self): return "" % (self.type, self.node) @@ -521,25 +468,15 @@ def empty(self): @abstractmethod def activation(self): """ - Returns the activation in this slot ('default' sheaf) + Returns the activation in this slot """ pass # pragma: no cover @property @abstractmethod - def activations(self): + def get_activation(self): """ - Returns a copy of the slots's activations (all sheaves) - Changes to the returned dict will not affect the gate - """ - pass # pragma: no cover - - @property - @abstractmethod - def get_activation(self, sheaf="default"): - """ - Returns the activation in this slot for the given sheaf. - Will return the activation in the 'default' sheaf if the sheaf does not exist + Returns the activation in this slot. 
""" pass # pragma: no cover @@ -558,17 +495,6 @@ class Nodetype(object): """Every node has a type, which is defined by its slot types, gate types, its node function and a list of node parameteres.""" - GATE_DEFAULTS = { - "minimum": -1, - "maximum": 1, - "certainty": 1, - "amplification": 1, - "threshold": -1, - "theta": 0, - "rho": 0, - "spreadsheaves": 0 - } - @property def parameters(self): return self._parameters @@ -590,36 +516,16 @@ def nodefunction_definition(self, nodefunction_definition): self.nodefunction = micropsi_core.tools.create_function(nodefunction_definition, parameters="nodenet, node, " + args) except SyntaxError as err: - self.logger.warn("Syntax error while compiling node function: %s", str(err)) + self.logger.warning("Syntax error while compiling node function: %s", str(err)) raise err @property def nodefunction_name(self): return self._nodefunction_name - @nodefunction_name.setter - def nodefunction_name(self, nodefunction_name): - import os - from importlib.machinery import SourceFileLoader - self._nodefunction_name = nodefunction_name - try: - if self.path: - module = SourceFileLoader("nodefunctions", self.path).load_module() - self.nodefunction = getattr(module, nodefunction_name) - else: - from micropsi_core.nodenet import nodefunctions - if hasattr(nodefunctions, nodefunction_name): - self.nodefunction = getattr(nodefunctions, nodefunction_name) - else: - self.logger.warning("Can not find definition of nodefunction %s" % nodefunction_name) - - except (ImportError, AttributeError) as err: - self.logger.warning("Import error while importing node function: nodefunctions.%s %s" % (nodefunction_name, err)) - raise err - def __init__(self, name, nodenet, slottypes=None, gatetypes=None, parameters=None, - nodefunction_definition=None, nodefunction_name=None, parameter_values=None, gate_defaults=None, - symbol=None, shape=None, engine=None, parameter_defaults=None, path='', category=''): + nodefunction_definition=None, nodefunction_name=None, 
parameter_values=None, + symbol=None, shape=None, engine=None, parameter_defaults=None, path='', category='', user_prompts={}, **_): """Initializes or creates a nodetype. Arguments: @@ -633,34 +539,176 @@ def __init__(self, name, nodenet, slottypes=None, gatetypes=None, parameters=Non self._parameters = [] self._nodefunction_definition = None self._nodefunction_name = None + self.line_number = -1 self.name = name - self.slottypes = slottypes or {} - self.gatetypes = gatetypes or {} + + self.slottypes = slottypes or [] + self.gatetypes = gatetypes or [] self.path = path self.category = category - + self.shape = shape + self.symbol = symbol self.logger = nodenet.logger - self.gate_defaults = {} - for g in self.gatetypes: - self.gate_defaults[g] = Nodetype.GATE_DEFAULTS.copy() - - if gate_defaults is not None: - for g in gate_defaults: - for key in gate_defaults[g]: - if g not in self.gate_defaults: - raise Exception("Invalid gate default value for nodetype %s: Gate %s not found" % (name, g)) - self.gate_defaults[g][key] = gate_defaults[g][key] - - self.parameters = parameters or {} + self.parameters = parameters or [] self.parameter_values = parameter_values or {} self.parameter_defaults = parameter_defaults or {} + self.user_prompts = {} + for key, val in user_prompts.items(): + self.user_prompts[key] = val.copy() + if nodefunction_definition: self.nodefunction_definition = nodefunction_definition elif nodefunction_name: - self.nodefunction_name = nodefunction_name + self._nodefunction_name = nodefunction_name else: self.nodefunction = None + self.load_functions() + + def load_functions(self): + """ Loads nodefunctions and user_prompt callbacks""" + import os + from importlib.machinery import SourceFileLoader + import inspect + try: + if self.path and self._nodefunction_name or self.user_prompts.keys(): + modulename = "nodetypes." 
+ self.category.replace('/', '.') + os.path.basename(self.path)[:-3] + module = SourceFileLoader(modulename, self.path).load_module() + if self._nodefunction_name: + self.nodefunction = getattr(module, self._nodefunction_name) + self.line_number = inspect.getsourcelines(self.nodefunction)[1] + for key, data in self.user_prompts.items(): + if hasattr(module, data['callback']): + self.user_prompts[key]['callback'] = getattr(module, data['callback']) + else: + self.logger.warning("Callback '%s' for user_prompt %s of nodetype %s not defined" % (data['callback'], key, self.name)) + elif self._nodefunction_name: + from micropsi_core.nodenet import nodefunctions + if hasattr(nodefunctions, self._nodefunction_name): + self.nodefunction = getattr(nodefunctions, self._nodefunction_name) + else: + self.logger.warning("Can not find definition of nodefunction %s" % self._nodefunction_name) + except (ImportError, AttributeError) as err: + self.logger.warning("Import error while importing node definition file of nodetype %s: %s" % (self.name, err)) + raise err + + def get_gate_dimensionality(self, gate): + return 1 + + def get_slot_dimensionality(self, slot): + return 1 + + def get_data(self): + data = { + 'name': self.name, + 'parameters': self.parameters, + 'parameter_values': self.parameter_values, + 'parameter_defaults': self.parameter_defaults, + 'symbol': self.symbol, + 'shape': self.shape, + 'nodefunction_definition': self.nodefunction_definition, + 'nodefunction_name': self.nodefunction_name, + 'path': self.path, + 'category': self.category, + 'line_number': self.line_number, + 'gatetypes': self.gatetypes, + 'slottypes': self.slottypes + } + return data + + +class FlowNodetype(Nodetype): + def __init__(self, name, nodenet, slottypes=None, gatetypes=None, parameters=None, + nodefunction_definition=None, nodefunction_name=None, parameter_values=None, + symbol=None, shape=None, engine=None, parameter_defaults=None, path='', category='', + flow_module=True, inputs=None, 
outputs=None, implementation=None, is_autogenerated=False, **_): + super().__init__(name, nodenet, slottypes=slottypes, gatetypes=gatetypes, parameters=parameters, + nodefunction_definition=nodefunction_definition, nodefunction_name=nodefunction_name, parameter_values=parameter_values, + symbol=symbol, shape=shape, engine=engine, parameter_defaults=parameter_defaults, path=path, category=category) + if is_autogenerated: + self.slottypes = [] + self.gatetypes = [] + else: + self.slottypes = ['sub'] + self.gatetypes = ['sur'] + self.is_autogenerated = is_autogenerated + self.is_flow_module = True + self.implementation = implementation + self.inputs = inputs + self.outputs = outputs + + def get_data(self): + data = super().get_data() + data.update({ + 'inputs': self.inputs, + 'outputs': self.outputs, + 'implementation': self.implementation, + 'is_autogenerated': self.is_autogenerated + }) + return data + + +class HighdimensionalNodetype(Nodetype): + def __init__(self, name, nodenet, slottypes=None, gatetypes=None, parameters=None, + nodefunction_definition=None, nodefunction_name=None, parameter_values=None, + symbol=None, shape=None, engine=None, parameter_defaults=None, path='', category='', dimensionality={}, **_): + super().__init__(name, nodenet, slottypes=slottypes, gatetypes=gatetypes, parameters=parameters, + nodefunction_definition=nodefunction_definition, nodefunction_name=nodefunction_name, parameter_values=parameter_values, + symbol=symbol, shape=shape, engine=engine, parameter_defaults=parameter_defaults, path=path, category=category) + + self.is_highdimensional = bool(dimensionality) + if nodenet.engine == "dict_engine" and self.is_highdimensional: + nodenet.logger.warning("Dict engine does not support high dimensional native_modules") + self.is_highdimensional = False + self.dimensionality = {} + + self.gategroups = [("%s0" % g) if dimensionality['gates'].get(g, 1) > 1 else g for g in gatetypes] + self.slotgroups = [("%s0" % s) if 
dimensionality['slots'].get(s, 1) > 1 else s for s in slottypes] + self.dimensionality = dimensionality + gates = [] + slots = [] + index = 0 + self.slotindexes = {} + self.gateindexes = {} + for g in self.gatetypes: + self.gateindexes[g] = index + if dimensionality['gates'].get(g, 1) > 1: + group = ["%s%d" % (g, i) for i in range(dimensionality['gates'][g])] + gates.extend(group) + index += dimensionality['gates'][g] + else: + gates.append(g) + index += 1 + + index = 0 + for s in self.slottypes: + self.slotindexes[s] = index + if dimensionality['slots'].get(s, 1) > 1: + group = ["%s%d" % (s, i) for i in range(dimensionality['slots'][s])] + slots.extend(group) + index += dimensionality['slots'][s] + else: + slots.append(s) + index += 1 + self.gatetypes = gates + self.slottypes = slots + + def get_gate_dimensionality(self, gate): + return self.dimensionality.get('gates', {}).get(gate, 1) + + def get_slot_dimensionality(self, slot): + return self.dimensionality.get('slots', {}).get(slot, 1) + + def get_data(self): + data = super().get_data() + data['gatetypes'] = self.gategroups + data['slottypes'] = self.slotgroups + data['is_highdimensional'] = True + data['dimensionality'] = { + 'gates': dict(("%s0" % g, self.dimensionality['gates'][g]) for g in self.dimensionality['gates']), + 'slots': dict(("%s0" % s, self.dimensionality['slots'][s]) for s in self.dimensionality['slots']), + } + return data diff --git a/micropsi_core/nodenet/node_alignment.py b/micropsi_core/nodenet/node_alignment.py index 91d155a1..4d5ae021 100644 --- a/micropsi_core/nodenet/node_alignment.py +++ b/micropsi_core/nodenet/node_alignment.py @@ -38,7 +38,7 @@ def align(nodenet, nodespace, entity_uids=False): unaligned_nodespaces = [id for id in unaligned_nodespaces if id in entity_uids] unaligned_nodes = [id for id in unaligned_nodes if id in entity_uids] sensors = [] - actors = [] + actuators = [] activators = [] ymin = min(nodenet.get_node(n).position[1] for n in unaligned_nodes + 
unaligned_nodespaces) xmin = min(nodenet.get_node(n).position[0] for n in unaligned_nodes + unaligned_nodespaces) @@ -46,9 +46,9 @@ def align(nodenet, nodespace, entity_uids=False): else: sensors = [s for s in unaligned_nodes if nodenet.get_node(s).type == "Sensor"] - actors = [a for a in unaligned_nodes if nodenet.get_node(a).type == "Actor"] + actuators = [a for a in unaligned_nodes if nodenet.get_node(a).type == "Actuator"] activators = [a for a in unaligned_nodes if nodenet.get_node(a).type == "Activator"] - unaligned_nodes = [n for n in unaligned_nodes if not nodenet.get_node(n).type in ("Sensor", "Actor", "Activator")] + unaligned_nodes = [n for n in unaligned_nodes if not nodenet.get_node(n).type in ("Sensor", "Actuator", "Activator")] start_position = (BORDER + GRID / 2, BORDER + (0.5 + math.ceil(len(unaligned_nodespaces) / PREFERRED_WIDTH)) * GRID, 0) @@ -65,8 +65,8 @@ def align(nodenet, nodespace, entity_uids=False): # group_other_links(por_groups) # group nodes that share a sur-linked parent below that parent group_with_same_parent(por_groups) - # put sensors and actors below - sensor_group = HorizontalGroup([DisplayNode(i) for i in sensors] + [DisplayNode(i) for i in actors]) + # put sensors and actuators below + sensor_group = HorizontalGroup([DisplayNode(i) for i in sensors] + [DisplayNode(i) for i in actuators]) actviator_group = HorizontalGroup([DisplayNode(i) for i in activators]) por_groups.append(sensor_group) por_groups.append(actviator_group) diff --git a/micropsi_core/nodenet/nodefunctions.py b/micropsi_core/nodenet/nodefunctions.py index 2417fab4..fbb490c5 100644 --- a/micropsi_core/nodenet/nodefunctions.py +++ b/micropsi_core/nodenet/nodefunctions.py @@ -13,7 +13,7 @@ #################################################################################################### -def register(netapi, node=None, **params): +def neuron(netapi, node=None, **params): activation = node.get_slot('gen').activation node.activation = 
node.get_gate('gen').gate_function(activation) @@ -27,7 +27,7 @@ def sensor(netapi, node=None, datasource=None, **params): node.get_gate('gen').gate_function(datasource_value) -def actor(netapi, node=None, datatarget=None, **params): +def actuator(netapi, node=None, datatarget=None, **params): activation_to_set = node.get_slot("gen").activation if netapi.worldadapter and datatarget in netapi.worldadapter.get_available_datatargets(): netapi.worldadapter.add_to_datatarget(datatarget, activation_to_set) @@ -111,7 +111,7 @@ def script(netapi, node=None, **params): 0.01 if node.get_slot("ret").activation < 0 else 1) -def pipe(netapi, node=None, sheaf="default", **params): +def pipe(netapi, node=None, **params): gen = 0.0 por = 0.0 ret = 0.0 @@ -127,28 +127,28 @@ def pipe(netapi, node=None, sheaf="default", **params): else: countdown -= 1 - gen_sur_exp = node.get_slot("sur").get_activation(sheaf) + node.get_slot("exp").get_activation(sheaf) + gen_sur_exp = node.get_slot("sur").get_activation() + node.get_slot("exp").get_activation() + gen_sur_exp *= node.get_slot("sub").get_activation() if 0 < gen_sur_exp < expectation: # don't report anything below expectation gen_sur_exp = 0 - gen += node.get_slot("gen").get_activation(sheaf) * node.get_slot("sub").get_activation(sheaf) + gen += node.get_slot("gen").get_activation() * node.get_slot("sub").get_activation() if abs(gen) < 0.1: gen = gen_sur_exp # cut off gen loop at lower threshold - if node.get_slot("por").get_activation(sheaf) == 0 and not node.get_slot("por").empty: + if node.get_slot("por").get_activation() == 0 and not node.get_slot("por").empty: gen = gen_sur_exp - sub += node.get_slot("sub").get_activation(sheaf) - sub += node.get_slot("cat").get_activation(sheaf) - sub *= max(node.get_slot("por").get_activation(sheaf), 0) if not node.get_slot("por").empty else 1 - sub *= 0 if node.get_slot("gen").get_activation(sheaf) != 0 else 1 + sub += node.get_slot("sub").get_activation() + sub += 
node.get_slot("cat").get_activation() + sub *= max(node.get_slot("por").get_activation(), 0) if not node.get_slot("por").empty else 1 + sub *= 0 if node.get_slot("gen").get_activation() != 0 else 1 if sub > 0: sub = 1 if sub < 0: sub = -1 - sur += node.get_slot("sur").get_activation(sheaf) - if sur == 0: sur += node.get_slot("sur").get_activation("default") # no activation in our sheaf, maybe from sensors? - if abs(node.get_slot("gen").get_activation(sheaf) * node.get_slot("sub").get_activation(sheaf)) > 0.2: # cut off sur-reports from gen looping before the loop fades away - sur += 1 if node.get_slot("gen").get_activation(sheaf) > 0 else -1 - sur += node.get_slot("exp").get_activation(sheaf) * node.get_slot("sub").get_activation(sheaf) + sur += node.get_slot("sur").get_activation() + if abs(node.get_slot("gen").get_activation() * node.get_slot("sub").get_activation()) > 0.2: # cut off sur-reports from gen looping before the loop fades away + sur += 1 if node.get_slot("gen").get_activation() > 0 else -1 + sur += node.get_slot("exp").get_activation() * node.get_slot("sub").get_activation() if sur > 0 and sur < expectation: # don't report anything below expectation sur = 0 @@ -160,8 +160,10 @@ def pipe(netapi, node=None, sheaf="default", **params): countdown = int(node.get_parameter("wait") or 1) if not node.get_slot("ret").empty: - sur = sur * node.get_slot("ret").get_activation(sheaf) - if node.get_slot("por").get_activation(sheaf) < 0: + sur = sur * node.get_slot("ret").get_activation() + if node.get_slot("por").get_activation() < 0: + sur = 0 + if node.get_slot("sub").get_activation() < 1: sur = 0 if sur > 1: @@ -169,37 +171,36 @@ def pipe(netapi, node=None, sheaf="default", **params): if sur < -1: sur = -1 - por += node.get_slot("sur").get_activation(sheaf) - por += (0 if node.get_slot("gen").get_activation(sheaf) < 0.1 else 1) * \ - (1+node.get_slot("por").get_activation(sheaf)) + por += node.get_slot("sur").get_activation() + por += (0 if 
node.get_slot("gen").get_activation() < 0.1 else 1) * \ + (1+node.get_slot("por").get_activation()) if countdown <= 0 and por < expectation: por = -1 - por *= node.get_slot("por").get_activation(sheaf) if not node.get_slot("por").empty else 1 # only por if por - por *= node.get_slot("sub").get_activation(sheaf) # only por if sub - por += node.get_slot("por").get_activation(sheaf) if node.get_slot("sub").get_activation(sheaf) == 0 and node.get_slot("sur").get_activation(sheaf) == 0 else 0 + por *= node.get_slot("por").get_activation() if not node.get_slot("por").empty else 1 # only por if por + por *= node.get_slot("sub").get_activation() # only por if sub + por += node.get_slot("por").get_activation() if node.get_slot("sub").get_activation() == 0 and node.get_slot("sur").get_activation() == 0 else 0 if por > 0: por = 1 - ret += node.get_slot("ret").get_activation(sheaf) if node.get_slot("sub").get_activation(sheaf) == 0 and node.get_slot("sur").get_activation(sheaf) == 0 else 0 - if node.get_slot("por").get_activation(sheaf) < 0: + ret += node.get_slot("ret").get_activation() if node.get_slot("sub").get_activation() == 0 and node.get_slot("sur").get_activation() == 0 else 0 + if node.get_slot("por").get_activation() < 0: ret = 1 if ret > 1: ret = 1 cat = sub - if cat == 0: cat += node.get_slot("cat").get_activation(sheaf) + if cat == 0: cat += node.get_slot("cat").get_activation() if cat < 0: cat = 0 - exp += node.get_slot("sur").get_activation(sheaf) - exp += node.get_slot("exp").get_activation(sheaf) - if abs(node.get_slot("gen").get_activation(sheaf) * node.get_slot("sub").get_activation(sheaf)) > 0.2: # cut off sur-reports from gen looping before the loop fades away + exp += node.get_slot("sur").get_activation() + exp += node.get_slot("exp").get_activation() + if abs(node.get_slot("gen").get_activation() * node.get_slot("sub").get_activation()) > 0.2: # cut off sur-reports from gen looping before the loop fades away exp += 1 - if exp == 0: exp += 
node.get_slot("sur").get_activation("default") # no activation in our sheaf, maybe from sensors? if exp > 1: exp = 1 - if node.get_slot('sub').get_activation(sheaf) > 0 and node.nodenet.use_modulators: + if node.get_slot('sub').get_activation() > 0 and node.nodenet.use_modulators: if sur > 0: netapi.change_modulator('base_number_of_expected_events', 1) elif sur < 0: @@ -209,19 +210,13 @@ def pipe(netapi, node=None, sheaf="default", **params): node.set_state("countdown", countdown) # set gates - node.set_sheaf_activation(gen, sheaf) - node.get_gate("gen").gate_function(gen, sheaf) - node.get_gate("por").gate_function(por, sheaf) - node.get_gate("ret").gate_function(ret, sheaf) - node.get_gate("sub").gate_function(sub, sheaf) - node.get_gate("sur").gate_function(sur, sheaf) - node.get_gate("exp").gate_function(exp, sheaf) - node.get_gate("cat").gate_function(cat, sheaf) - #if cat > 0 and node.get_slot("sub").get_activation(sheaf) > 0: # cats will be checked in their own sheaf - # node.get_gate("cat").open_sheaf(cat, sheaf) - # node.get_gate("cat").gate_function(0, sheaf) - #else: - # node.get_gate("cat").gate_function(cat, sheaf) + node.get_gate("gen").gate_function(gen) + node.get_gate("por").gate_function(por) + node.get_gate("ret").gate_function(ret) + node.get_gate("sub").gate_function(sub) + node.get_gate("sur").gate_function(sur) + node.get_gate("exp").gate_function(exp) + node.get_gate("cat").gate_function(cat) def activator(netapi, node, **params): diff --git a/micropsi_core/nodenet/nodenet.py b/micropsi_core/nodenet/nodenet.py index dcfbbf53..a2501b9f 100644 --- a/micropsi_core/nodenet/nodenet.py +++ b/micropsi_core/nodenet/nodenet.py @@ -4,8 +4,9 @@ Nodenet definition """ - +import os import logging + from datetime import datetime from threading import Lock from abc import ABCMeta, abstractmethod @@ -13,11 +14,14 @@ import micropsi_core.tools from .netapi import NetAPI from . import monitor +from . 
import recorder +from .node import Nodetype, FlowNodetype, HighdimensionalNodetype __author__ = 'joscha' __date__ = '09.05.12' -NODENET_VERSION = 1 + +NODENET_VERSION = 2 class NodenetLockException(Exception): @@ -64,10 +68,11 @@ def metadata(self): 'current_step': self.current_step, 'world': self._world_uid, 'worldadapter': self._worldadapter_uid, - 'version': NODENET_VERSION, + 'version': self._version, 'runner_condition': self._runner_condition, 'use_modulators': self.use_modulators, - 'nodespace_ui_properties': self._nodespace_ui_properties + 'nodespace_ui_properties': self._nodespace_ui_properties, + 'worldadapter_config': {} if not self.worldadapter_instance else self.worldadapter_instance.config } return data @@ -136,25 +141,34 @@ def worldadapter_instance(self, _worldadapter_instance): Connects the node net to the given world adapter uid, or disconnects if None is given """ self._worldadapter_instance = _worldadapter_instance + if self._worldadapter_instance: + self._worldadapter_instance.nodenet = self - def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=None, use_modulators=True, worldadapter_instance=None): + def __init__(self, persistency_path, name="", worldadapter="Default", world=None, owner="", uid=None, native_modules={}, use_modulators=True, worldadapter_instance=None, version=None): """ Constructor for the abstract base class, must be called by implementations """ self._uid = uid or micropsi_core.tools.generate_uid() + self.persistency_path = persistency_path self._name = name self._world_uid = world self._worldadapter_uid = worldadapter if world else None self._worldadapter_instance = worldadapter_instance + if self._worldadapter_instance: + self._worldadapter_instance.nodenet = self self.is_active = False + self.frequency = 0.0 self.use_modulators = use_modulators - self._version = NODENET_VERSION # used to check compatibility of the node net data + self._version = version or NODENET_VERSION # used to check 
compatibility of the node net data self._uid = uid self._runner_condition = None + self.runner_config = {} self.owner = owner self._monitors = {} + self._recorders = {} + self._adhoc_monitors = {} self._nodespace_ui_properties = {} self.netlock = Lock() @@ -163,12 +177,27 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No self.logger.info("Setting up nodenet %s with engine %s", self.name, self.engine) self.user_prompt = None + self.user_prompt_response = {} self.netapi = NetAPI(self) self.deleted_items = {} self.stepping_rate = [] self.dashboard_values = {} + self.figures = {} + + self.native_modules = {} + for type, data in native_modules.items(): + if data.get('engine', self.engine) == self.engine: + try: + if data.get('flow_module'): + self.native_modules[type] = FlowNodetype(nodenet=self, **data) + elif data.get('dimensionality'): + self.native_modules[type] = HighdimensionalNodetype(nodenet=self, **data) + else: + self.native_modules[type] = Nodetype(nodenet=self, **data) + except Exception as err: + self.logger.error("Can not instantiate node type %s: %s: %s" % (type, err.__class__.__name__, str(err))) self._modulators = {} if use_modulators: @@ -176,6 +205,9 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No for modulator in emo.writeable_modulators + emo.readable_modulators: self._modulators[modulator] = 1 + if not os.path.isdir(self.persistency_path): + os.mkdir(self.persistency_path) + def get_data(self, complete=False, include_links=True): """ Returns a dict representing the whole node net. @@ -196,37 +228,70 @@ def get_data(self, complete=False, include_links=True): }) return data + def simulation_started(self): + self.is_active = True + + def simulation_stopped(self): + self.is_active = False + + def set_user_prompt(self, node, key, message, parameters={}): + if self.user_prompt is not None: + raise RuntimeError("Currently only one user prompt per nodenet step supported. 
node %s already registered one" % str(self.user_prompt['node'])) + else: + self.user_prompt = { + 'node': node, + 'key': key, + 'msg': message, + 'parameters': parameters + } + self.is_active = False + + def consume_user_prompt(self): + data = self.user_prompt + if data: + data['node'] = data['node'].get_data() + self.user_prompt = None + return data + + def set_user_prompt_response(self, node_uid, key, parameters): + node = self.get_node(node_uid) + node.get_user_prompt(key)['callback'](self.netapi, node, parameters) + @abstractmethod - def get_nodes(self, nodespaces=[], include_links=True): + def get_nodes(self, nodespaces=[], node_uids=[], include_links=True, links_to_nodespaces=[]): """ Returns a dict with contents for the given nodespaces """ pass # pragma: no cover @abstractmethod - def save(self, filename): + def get_links_for_nodes(self, node_uids): """ - Saves the nodenet to the given main metadata json file. + Returns a tuple consisting of links from/to the given node + and the nodes that are connected via these links """ pass # pragma: no cover @abstractmethod - def load(self, filename): + def save(self, base_path=None, zipfile=None): """ - Loads the node net from the given main metadata json file. + Saves the nodenet to persistency. + Arguments: + base_path (String) - Save files to a non-standard directory + zipfile (ZipFile object) - Save the nodenet to a zipfile instead """ pass # pragma: no cover @abstractmethod - def remove(self, filename): + def load(self): """ - Removes the node net's given main metadata json file, plus any additional files the node net may - have created for persistency + Loads the node net from the given main metadata json file. 
""" pass # pragma: no cover - def timed_step(self): + def timed_step(self, runner_config={}): start = datetime.now() + self.runner_config = runner_config self.step() elapsed = datetime.now() - start self.stepping_rate.append(elapsed.seconds + ((elapsed.microseconds // 1000) / 1000)) @@ -262,9 +327,9 @@ def is_node(self, uid): pass # pragma: no cover @abstractmethod - def create_node(self, nodetype, nodespace_uid, position, name="", uid=None, parameters=None, gate_parameters=None): + def create_node(self, nodetype, nodespace_uid, position, name="", uid=None, parameters=None, gate_configuration=None): """ - Creates a new node of the given node type (string), in the nodespace with the given UID, at the given + Creates a new node of the given node type (string), in the given nodespace, at the given position and returns the uid of the new node """ pass # pragma: no cover @@ -317,15 +382,15 @@ def get_nodespace_properties(self, nodespace_uid=None): return self._nodespace_ui_properties @abstractmethod - def set_entity_positions(self, positions): - """ Sets the position of nodes or nodespaces. + def set_node_positions(self, positions): + """ Sets the position of nodes. Parameters: a hash of uids to their positions """ pass # pragma: no cover @abstractmethod - def create_nodespace(self, parent_uid, position, name="", uid=None): + def create_nodespace(self, parent_uid, name="", uid=None): """ - Creates a new nodespace in the nodespace with the given UID, at the given position. + Creates a new nodespace within the given parent-nodespace """ pass # pragma: no cover @@ -344,21 +409,21 @@ def get_sensors(self, nodespace=None, datasource=None): pass # pragma: no cover @abstractmethod - def get_actors(self, nodespace=None, datatarget=None): + def get_actuators(self, nodespace=None, datatarget=None): """ - Returns a dict of all actor nodes. Optionally filtered by the given nodespace and data target + Returns a dict of all actuator nodes. 
Optionally filtered by the given nodespace and data target """ pass # pragma: no cover @abstractmethod - def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): + def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): """ Creates a new link between the given node/gate and node/slot """ pass # pragma: no cover @abstractmethod - def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): + def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): """ Set weight of the link between the given node/gate and node/slot """ @@ -383,33 +448,12 @@ def reload_native_modules(self, native_modules): "name": "Name of the Native Module", "slottypes": ["trigger"], "nodefunction_name": "native_module_function", - "gatetypes": ["done"], - "gate_defaults": { - "done": { - "minimum": -100, - "maximum": 100, - "threshold": -100 - } - } + "gatetypes": ["done"] } """ pass # pragma: no cover - @abstractmethod - def get_nodespace_data(self, nodespace_uid, include_links): - """ - Returns a data dict of the nodenet state for the given nodespace. - - Implementations are expected to fill the following keys: - 'nodes' - map of nodes it the given rectangle - 'links' - map of links ending or originating in the given rectangle - 'nodespaces' - map of nodespaces positioned in the given rectangle - 'monitors' - result of self.construct_monitors_dict() - 'user_prompt' - self.user_prompt if set, should be cleared then - """ - pass # pragma: no cover - def get_activation_data(self, nodespace_uids=[], rounded=1): """ Returns a dict of uids to lists of activation values. 
@@ -451,6 +495,26 @@ def get_standard_nodetype_definitions(self): """ pass # pragma: no cover + def get_native_module_definitions(self): + """ + Returns the native modules supported by this nodenet + """ + data = {} + for key in self.native_modules: + if type(self.native_modules[key]) != FlowNodetype: + data[key] = self.native_modules[key].get_data() + return data + + def get_flow_module_definitions(self): + """ + Returns the flow modules supported by this nodenet + """ + data = {} + for key in self.native_modules: + if type(self.native_modules[key]) == FlowNodetype: + data[key] = self.native_modules[key].get_data() + return data + @abstractmethod def group_nodes_by_names(self, nodespace_uid, node_name_prefix=None, gatetype="gen", sortby='id', group_name=None): """ @@ -496,18 +560,23 @@ def set_activations(self, nodespace_uid, group, new_activations): pass # pragma: no cover @abstractmethod - def get_thetas(self, nodespace_uid, group): + def get_gate_configurations(self, nodespace_uid, group, gatefunction_parameter=None): """ - Returns a list of theta values for the given group. - For multi-gate nodes, the thetas of the gen gates will be returned + Returns a dictionary containing a list of gatefunction names, and a list of the values + of the given gatefunction_parameter (if given) """ pass # pragma: no cover @abstractmethod - def set_thetas(self, nodespace_uid, group, thetas): + def set_gate_configurations(self, nodespace_uid, group, gatefunction, gatefunction_parameter=None, parameter_values=None): """ - Bulk-sets thetas for the given group. - new_thetas dimensionality has to match the group length + Bulk-sets gatefunctions and a gatefunction_parameter for the given group. 
+ Arguments: + nodespace_uid (string) - id of the parent nodespace + group (string) - name of the group + gatefunction (string) - name of the gatefunction to set + gatefunction_parameter (optional) - name of the gatefunction_parameter to set + parameter_values (optional) - values to set for the gatefunction_parameter """ pass # pragma: no cover @@ -532,7 +601,7 @@ def set_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro @abstractmethod def get_available_gatefunctions(self): """ - Returns a list of available gate functions + Returns a dict of the available gatefunctions and their parameters and parameter-defaults """ pass # pragma: no cover @@ -545,7 +614,7 @@ def has_nodespace_changes(self, nodespace_uids=[], since_step=0): pass # pragma: no cover @abstractmethod - def get_nodespace_changes(self, nodespace_uids=[], since_step=0): + def get_nodespace_changes(self, nodespace_uids=[], since_step=0, include_links=True): """ Returns a dictionary of structural changes that happened in the given nodespace since the given step @@ -574,28 +643,29 @@ def _track_deletion(self, entity_type, uid): def clear(self): self._monitors = {} + self.close_figures() - def add_gate_monitor(self, node_uid, gate, sheaf=None, name=None, color=None): + def add_gate_monitor(self, node_uid, gate, name=None, color=None): """Adds a continuous monitor to the activation of a gate. The monitor will collect the activation value in every calculation step. Returns the uid of the new monitor.""" - mon = monitor.NodeMonitor(self, node_uid, 'gate', gate, sheaf=sheaf, name=name, color=color) + mon = monitor.NodeMonitor(self, node_uid, 'gate', gate, name=name, color=color) self._monitors[mon.uid] = mon return mon.uid - def add_slot_monitor(self, node_uid, slot, sheaf=None, name=None, color=None): + def add_slot_monitor(self, node_uid, slot, name=None, color=None): """Adds a continuous monitor to the activation of a slot.
The monitor will collect the activation value in every calculation step. Returns the uid of the new monitor.""" - mon = monitor.NodeMonitor(self, node_uid, 'slot', slot, sheaf=sheaf, name=name, color=color) + mon = monitor.NodeMonitor(self, node_uid, 'slot', slot, name=name, color=color) self._monitors[mon.uid] = mon return mon.uid - def add_link_monitor(self, source_node_uid, gate_type, target_node_uid, slot_type, property=None, name=None, color=None): - """Adds a continuous monitor to a link. You can choose to monitor either weight (default) or certainty - The monitor will collect respective value in every calculation step. + def add_link_monitor(self, source_node_uid, gate_type, target_node_uid, slot_type, name=None, color=None): + """Adds a continuous monitor to the weight of a link. The monitor will collect the weight + value in every calculation step. Returns the uid of the new monitor.""" - mon = monitor.LinkMonitor(self, source_node_uid, gate_type, target_node_uid, slot_type, property=property, name=name, color=color) + mon = monitor.LinkMonitor(self, source_node_uid, gate_type, target_node_uid, slot_type, name=name, color=color) self._monitors[mon.uid] = mon return mon.uid @@ -615,22 +685,77 @@ def add_custom_monitor(self, function, name, color=None): self._monitors[mon.uid] = mon return mon.uid + def add_group_monitor(self, nodespace, name, node_name_prefix='', node_uids=[], gate='gen', color=None): + """Adds a continuous monitor, that tracks the activations of the given group + return-value for every calculation step. + Returns the uid of the new monitor.""" + mon = monitor.GroupMonitor(self, nodespace, name, node_name_prefix, node_uids, gate, color=color) + self._monitors[mon.uid] = mon + return mon.uid + + def add_adhoc_monitor(self, function, name, parameters={}): + """Adds an ephemeral adhoc monitor to quickly plot values returned by the given function. + If a monitor with the given name already exists, its value-function is updated.
""" + if name in self._adhoc_monitors: + self._adhoc_monitors[name].function = function + self._adhoc_monitors[name].parameters + else: + mon = monitor.AdhocMonitor(self, function, name, parameters=parameters) + self._adhoc_monitors[name] = mon + def get_monitor(self, uid): return self._monitors.get(uid) - def update_monitors(self): + def get_recorder(self, uid): + return self._recorders.get(uid) + + def update_monitors_and_recorders(self): for uid in self._monitors: self._monitors[uid].step(self.current_step) + for uid in self._recorders: + self._recorders[uid].step(self.current_step) + for name in self._adhoc_monitors: + self._adhoc_monitors[name].step(self.current_step) - def construct_monitors_dict(self): + def construct_monitors_dict(self, with_values=True): data = {} for monitor_uid in self._monitors: - data[monitor_uid] = self._monitors[monitor_uid].get_data() + data[monitor_uid] = self._monitors[monitor_uid].get_data(with_values=with_values) + return data + + def construct_recorders_dict(self): + data = {} + for uid in self._recorders: + data[uid] = self._recorders[uid].get_data() + return data + + def construct_adhoc_monitors_dict(self, with_values=True): + data = {} + for name in self._adhoc_monitors: + data[self._adhoc_monitors[name].uid] = self._adhoc_monitors[name].get_data(with_values=with_values) return data def remove_monitor(self, monitor_uid): del self._monitors[monitor_uid] + def add_gate_activation_recorder(self, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + raise NotImplementedError("Recorders are not implemented in the this engine") + + def add_node_activation_recorder(self, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + raise NotImplementedError("Recorders are not implemented in the this engine") + + def add_linkweight_recorder(self, from_group_definition, to_group_definition, name, interval=1): + """ Adds a linkweight recorder to links 
between two groups.""" + raise NotImplementedError("Recorders are not implemented in the this engine") + + def remove_recorder(self, recorder_uid): + filename = self._recorders[recorder_uid].filename + if os.path.isfile(filename): + os.remove(filename) + del self._recorders[recorder_uid] + def get_dashboard(self): data = self.dashboard_values.copy() data['is_active'] = self.is_active @@ -664,3 +789,29 @@ def check_stop_runner_condition(self): else: del self.self._runner_condition['monitor'] return False + + def register_figure(self, node_uid, figure): + """ Registers a figure for the given node_uid""" + if node_uid in self.figures: + self.figures[node_uid].append(figure) + else: + self.figures[node_uid] = [figure] + + def close_figures(self, node_uid=None): + """ Close all figures used by the given node, or all figures if no uid given""" + try: + import matplotlib.pyplot as plt + if node_uid is not None: + plots = self.figures.get(node_uid, []) + if len(plots): + self.logger.debug("Closing %d figures belonging to node %s" % (len(plots), node_uid)) + for fig in plots: + plt.close(fig) + del self.figures[node_uid] + else: + self.logger.debug("Closing open figures.") + for uid in self.figures: + [plt.close(fig) for fig in self.figures[uid]] + self.figures = {} + except ImportError: + pass diff --git a/micropsi_core/nodenet/nodespace.py b/micropsi_core/nodenet/nodespace.py index 8c7f1361..0a34a0bf 100644 --- a/micropsi_core/nodenet/nodespace.py +++ b/micropsi_core/nodenet/nodespace.py @@ -45,24 +45,6 @@ def index(self, index): """ pass # pragma: no cover - @property - @abstractmethod - def position(self): - """ - This node's 3D coordinates within its nodespace - """ - # todo: persistent 3D coordinates are likely to be made non-persistent or stored elsewhere - pass # pragma: no cover - - @position.setter - @abstractmethod - def position(self, position): - """ - This node's 3D coordinates within its nodespace - """ - # todo: persistent 3D coordinates are likely to be
made non-persistent or stored elsewhere - pass # pragma: no cover - @property @abstractmethod def name(self): @@ -95,7 +77,6 @@ def get_data(self): "uid": self.uid, "index": self.index, "name": self.name, - "position": self.position, "parent_nodespace": self.parent_nodespace, } diff --git a/micropsi_core/nodenet/operations/__init__.py b/micropsi_core/nodenet/operations/__init__.py index 89df238e..d5d2483b 100644 --- a/micropsi_core/nodenet/operations/__init__.py +++ b/micropsi_core/nodenet/operations/__init__.py @@ -25,10 +25,12 @@ def selectioninfo(nodetypes=[], mincount=0, maxcount=-1): def _decorator(func): - func.selectioninfo = { + if not hasattr(func, 'selectioninfo'): + func.selectioninfo = [] + func.selectioninfo.append({ 'nodetypes': nodetypes if type(nodetypes) == list else [nodetypes], 'mincount': mincount, 'maxcount': maxcount - } + }) return func return _decorator diff --git a/micropsi_core/nodenet/operations/layout.py b/micropsi_core/nodenet/operations/layout.py index 6474ed11..3879e0da 100644 --- a/micropsi_core/nodenet/operations/layout.py +++ b/micropsi_core/nodenet/operations/layout.py @@ -2,7 +2,8 @@ from micropsi_core.nodenet.operations import selectioninfo -@selectioninfo(mincount=1) +@selectioninfo(mincount=1, nodetypes=['Nodespace']) +@selectioninfo(mincount=2) def autoalign(netapi, selection): """ Autoalign nodes or nodespaces.""" if len(selection) == 1: diff --git a/micropsi_core/nodenet/operations/recorders.py b/micropsi_core/nodenet/operations/recorders.py new file mode 100644 index 00000000..0d7ccc90 --- /dev/null +++ b/micropsi_core/nodenet/operations/recorders.py @@ -0,0 +1,62 @@ + +from micropsi_core.nodenet.operations import selectioninfo + + +try: + import numpy as np + + @selectioninfo(mincount=2) + def add_gate_activation_recorder(netapi, selection, gate='gen', interval=1, name='gate_activation_recorder'): + """Adds an activation recorder to the selected nodes""" + firstnode = netapi.get_node(selection[0]) + nodespace = 
netapi.get_nodespace(firstnode.parent_nodespace) + group_config = { + 'nodespace_uid': nodespace.uid, + 'node_uids': selection, + 'gatetype': gate} + recorder = netapi.add_gate_activation_recorder(group_config, name=name, interval=int(interval)) + return {'uid': recorder.uid} + + @selectioninfo(mincount=2) + def add_node_activation_recorder(netapi, selection, interval=1, name='node_activation_recorder'): + """Adds an activation recorder to the selected nodes""" + firstnode = netapi.get_node(selection[0]) + nodespace = netapi.get_nodespace(firstnode.parent_nodespace) + group_config = { + 'nodespace_uid': nodespace.uid, + 'node_uids': selection} + recorder = netapi.add_node_activation_recorder(group_config, name=name, interval=int(interval)) + return {'uid': recorder.uid} + + @selectioninfo(mincount=2) + def add_linkweight_recorder(netapi, selection, direction='down', from_gate='gen', to_slot='gen', interval=1, name='linkweight_recorder'): + """ Attempts to detect two layers of nodes (y-coordinate) and adds a linkweight-monitor""" + nodes = [netapi.get_node(uid) for uid in selection] + nodespace = netapi.get_nodespace(nodes[0].parent_nodespace) + groups = {} + for n in nodes: + if n.position[1] in groups: + groups[n.position[1]].append(n) + else: + groups[n.position[1]] = [n] + if len(groups.keys()) != 2: + raise RuntimeError("Could not determine 2 node-layers") + + grouplist = list(groups.values()) + if direction == 'up': + grouplist.reverse() + + from_group_config = { + 'nodespace_uid': nodespace.uid, + 'node_uids': [n.uid for n in grouplist[0]], + 'gatetype': from_gate} + to_group_config = { + 'nodespace_uid': nodespace.uid, + 'node_uids': [n.uid for n in grouplist[1]], + 'gatetype': to_slot} + recorder = netapi.add_linkweight_recorder(from_group_config, to_group_config, name=name, interval=int(interval)) + return {'uid': recorder.uid} + + +except ImportError: + pass diff --git a/micropsi_core/nodenet/recorder.py b/micropsi_core/nodenet/recorder.py new file mode 
100644 index 00000000..f353545b --- /dev/null +++ b/micropsi_core/nodenet/recorder.py @@ -0,0 +1,224 @@ +# -*- coding: utf-8 -*- + + +""" +Recorder + +Recorders need nummpy, record things like activation, linkweights, biases over time, +and persist to their own files. they can be imported and exported as numpy npz +""" + +import os +try: + import numpy as np +except ImportError: + pass +from abc import ABCMeta, abstractmethod +from micropsi_core import tools + + +class Recorder(metaclass=ABCMeta): + + """A recorder will record values from section of the nodenet + and offer import/export functionaliy + Recorders need numpy.""" + + initial_size = 10000 + + def __init__(self, nodenet, name="", uid="", interval=1, first_step=0, current_index=-1): + self._nodenet = nodenet + self.name = name + self.uid = uid or tools.generate_uid() + self.interval = interval + self.filename = os.path.join(nodenet.persistency_path, 'recorder_%s.npz' % self.uid) + self.first_step = first_step + self.current_index = current_index + self.shapes = {} + self.values = {} + if os.path.isfile(self.filename): + self.load() + + def get_data(self): + data = { + "uid": self.uid, + "name": self.name, + "interval": self.interval, + "filename": self.filename, + "current_index": self.current_index, + "first_step": self.first_step, + "classname": self.__class__.__name__ + } + return data + + def step(self, step): + if step % self.interval == 0: + self.current_index += 1 + values = self.get_values() + for key in values: + if key not in self.values: + self.first_step = step + self.values[key] = np.zeros(shape=self.shapes[key], dtype=self._nodenet.numpyfloatX) + self.values[key][:] = np.NAN + if step - self.first_step >= len(self.values[key]): + newshapes = list(self.shapes[key]) + newshapes[0] += self.initial_size + self.shapes[key] = tuple(newshapes) + new_values = np.zeros(shape=self.shapes[key], dtype=self._nodenet.numpyfloatX) + new_values[0:len(self.values[key])] = self.values[key] + self.values[key] 
= new_values + self.values[key][self.current_index] = values[key] + + @abstractmethod + def get_values(self): + pass # pragma: no cover + + def export_data(self): + data = {} + for key in self.values: + data["%s_%s" % (self.name, key)] = self.values[key] + data['%s_meta' % self.name] = [self.first_step, self.interval] + return data + + def save(self, filename=None): + data = self.export_data() + if data: + np.savez(filename if filename is not None else self.filename, **data) + + def load(self, filename=None): + data = np.load(filename if filename is not None else self.filename) + for key in data: + if key.endswith('_meta'): + self.first_step = int(data[key][0]) + self.interval = int(data[key][1]) + else: + self.values[key] = data[key] + + def clear(self): + self.values = {} + self.current_index = -1 + + def import_file(self, filename): + self.load(filename) + + +class GateActivationRecorder(Recorder): + """ An activation recorder to record activaitons of nodegroups""" + + def __init__(self, nodenet, group_config={}, name="", uid="", interval=1, first_step=0, current_index=-1, **_): + super().__init__(nodenet, name, uid, interval, first_step=first_step, current_index=current_index) + if 'group_name' not in group_config: + group_config['group_name'] = name + self.group_config = group_config + self.nodespace = group_config['nodespace_uid'] + self.group_name = group_config['group_name'] + + if not group_config.get('node_uids', []): + self._nodenet.group_nodes_by_names(**group_config) + else: + self._nodenet.group_nodes_by_ids(**group_config) + + uids = self._nodenet.get_node_uids(self.nodespace, self.group_name) + self.shapes = {'activations': (self.initial_size, len(uids))} + + def get_data(self): + data = super().get_data() + data.update({ + "group_config": self.group_config, + }) + return data + + def get_values(self): + return {'activations': self._nodenet.get_activations(self.nodespace, self.group_name)} + + +class NodeActivationRecorder(Recorder): + """ An 
activation recorder to record activaitons of nodegroups""" + + def __init__(self, nodenet, group_config={}, name="", uid="", interval=1, first_step=0, current_index=-1, **_): + super().__init__(nodenet, name, uid, interval, first_step=first_step, current_index=current_index) + + self.group_config = group_config + self.nodespace = group_config['nodespace_uid'] + self.base_group_name = group_config.pop('group_name', name) + + if not group_config.get('node_uids', []): + nodes = self._nodenet.netapi.get_nodes(nodespace=self.nodespace, node_name_prefix=group_config['node_name_prefix'], sortby=group_config.get('sortby', 'ids')) + else: + nodes = [self._nodenet.get_node(uid) for uid in group_config['node_uids']] + + node_uids = [n.uid for n in nodes] + assert len(set([n.type for n in nodes])) == 1 # assert we have a homogeneous group + self.gatetypes = nodes[0].get_gate_types() + self.groupnames = [] + for g in self.gatetypes: + group_name = self.base_group_name + '_%s' % g + self.groupnames.append(group_name) + self._nodenet.group_nodes_by_ids(self.nodespace, node_uids, gatetype=g, group_name=group_name, sortby=group_config.get('sortby', 'id')) + + self.shapes = {'activations': (self.initial_size, len(self.gatetypes), len(nodes))} + + def get_data(self): + data = super().get_data() + data.update({ + "group_config": self.group_config, + }) + return data + + def get_values(self): + return {'activations': [self._nodenet.get_activations(self.nodespace, groupname) for groupname in self.groupnames]} + + +class LinkweightRecorder(Recorder): + """ An activation recorder to biases and the linkweights of two nodegroups""" + + def __init__(self, nodenet, from_group_config={}, to_group_config={}, name="", uid="", interval=1, first_step=0, current_index=-1, **_): + super().__init__(nodenet, name, uid, interval, first_step=first_step, current_index=current_index) + + if 'group_name' not in from_group_config: + from_group_config['group_name'] = "%s_from" % name + if 'group_name' not in 
to_group_config: + to_group_config['group_name'] = "%s_to" % name + + self.from_group_config = from_group_config + self.to_group_config = to_group_config + + self.from_nodespace = from_group_config['nodespace_uid'] + self.to_nodespace = to_group_config['nodespace_uid'] + self.from_name = from_group_config['group_name'] + self.to_name = to_group_config['group_name'] + + if not from_group_config.get('node_uids', []): + self._nodenet.group_nodes_by_names(**from_group_config) + else: + self._nodenet.group_nodes_by_ids(**from_group_config) + + if not to_group_config.get('node_uids', []): + self._nodenet.group_nodes_by_names(**to_group_config) + else: + self._nodenet.group_nodes_by_ids(**to_group_config) + + weights = self._nodenet.get_link_weights(self.from_nodespace, self.from_name, self.to_nodespace, self.to_name) + from_uids = self._nodenet.get_node_uids(self.from_nodespace, self.from_name) + to_uids = self._nodenet.get_node_uids(self.to_nodespace, self.to_name) + self.shapes = { + 'linkweights': (self.initial_size, weights.shape[0], weights.shape[1]), + 'from_bias': (self.initial_size, len(from_uids)), + 'to_bias': (self.initial_size, len(to_uids)) + } + + def get_data(self): + data = super().get_data() + data.update({ + 'from_group_config': self.from_group_config, + 'to_group_config': self.to_group_config + }) + return data + + def get_values(self): + from_config = self._nodenet.get_gate_configurations(self.from_nodespace, self.from_name, 'bias') + to_config = self._nodenet.get_gate_configurations(self.to_nodespace, self.to_name, 'bias') + return { + 'linkweights': self._nodenet.get_link_weights(self.from_nodespace, self.from_name, self.to_nodespace, self.to_name), + 'from_bias': from_config['parameter_values'], + 'to_bias': to_config['parameter_values'] + } diff --git a/micropsi_core/nodenet/theano_engine/theano_definitions.py b/micropsi_core/nodenet/theano_engine/theano_definitions.py index 850b98d9..f95813d2 100644 --- 
a/micropsi_core/nodenet/theano_engine/theano_definitions.py +++ b/micropsi_core/nodenet/theano_engine/theano_definitions.py @@ -24,9 +24,12 @@ GATE_FUNCTION_IDENTITY = 0 GATE_FUNCTION_ABSOLUTE = 1 GATE_FUNCTION_SIGMOID = 2 -GATE_FUNCTION_TANH = 3 -GATE_FUNCTION_RECT = 4 +#GATE_FUNCTION_TANH = 3 +GATE_FUNCTION_RELU = 4 GATE_FUNCTION_DIST = 5 +GATE_FUNCTION_ELU = 6 +GATE_FUNCTION_THRESHOLD = 7 + NFPG_PIPE_NON = 0 NFPG_PIPE_GEN = 1 @@ -152,9 +155,9 @@ def get_string_slot_type(type, nodetype=None): def get_numerical_node_type(type, nativemodules=None): - if type == "Register": + if type == "Neuron": return REGISTER - elif type == "Actor": + elif type == "Actuator": return ACTUATOR elif type == "Sensor": return SENSOR @@ -178,9 +181,9 @@ def get_numerical_node_type(type, nativemodules=None): def get_string_node_type(type, nativemodules=None): if type == REGISTER: - return "Register" + return "Neuron" elif type == ACTUATOR: - return "Actor" + return "Actuator" elif type == SENSOR: return "Sensor" elif type == ACTIVATOR: @@ -208,12 +211,14 @@ def get_numerical_gatefunction_type(type): return GATE_FUNCTION_ABSOLUTE elif type == "sigmoid": return GATE_FUNCTION_SIGMOID - elif type == "tanh": - return GATE_FUNCTION_TANH - elif type == "rect": - return GATE_FUNCTION_RECT + elif type == "relu": + return GATE_FUNCTION_RELU elif type == "one_over_x": return GATE_FUNCTION_DIST + elif type == "elu": + return GATE_FUNCTION_ELU + elif type == "threshold": + return GATE_FUNCTION_THRESHOLD else: raise ValueError("Supplied gatefunction type is not a valid type: "+str(type)) @@ -225,12 +230,14 @@ def get_string_gatefunction_type(type): return "absolute" elif type == GATE_FUNCTION_SIGMOID: return "sigmoid" - elif type == GATE_FUNCTION_TANH: - return "tanh" - elif type == GATE_FUNCTION_RECT: - return "rect" + elif type == GATE_FUNCTION_RELU: + return "relu" elif type == GATE_FUNCTION_DIST: return "one_over_x" + elif type == GATE_FUNCTION_ELU: + return "elu" + elif type == 
GATE_FUNCTION_THRESHOLD: + return "threshold" else: raise ValueError("Supplied gatefunction type is not a valid type: "+str(type)) @@ -326,4 +333,19 @@ def nodespace_to_id(numericid, partitionid): def nodespace_from_id(stringid): - return int(stringid[4:]) \ No newline at end of file + return int(stringid[4:]) + + +def create_tensor(ndim, dtype, name="tensor"): + # return a theano tensor with the given dimensionality + from theano import tensor as T + if ndim == 0: + return T.scalar(name=name, dtype=dtype) + elif ndim == 1: + return T.vector(name=name, dtype=dtype) + elif ndim == 2: + return T.matrix(name=name, dtype=dtype) + elif ndim == 3: + return T.tensor3(name=name, dtype=dtype) + elif ndim == 4: + return T.tensor4(name=name, dtype=dtype) diff --git a/micropsi_core/nodenet/theano_engine/theano_flowmodule.py b/micropsi_core/nodenet/theano_engine/theano_flowmodule.py new file mode 100644 index 00000000..e57d941c --- /dev/null +++ b/micropsi_core/nodenet/theano_engine/theano_flowmodule.py @@ -0,0 +1,239 @@ + +""" +Flowmodules are a special kind of native modules, with the following properties: + +* They have inputs and outputs, in addition to a sub-slot and a sur-gate +* They can be connected to create a flow between Flowmodules +* Flow-terminals are datasources, datatargets and Flow Endndoes +* Flow Endnodes are Flowmodules that have at least one link ending at their sub-slot +* If the sub-slot of an Endnode X receives activation, everything between X and other Flow-terminals (a Flowgraph) is calculated within one nodenet step. +* All Flowmodules that are part of an active Flowgraph show this via activation on their sur-gate + +* Flow modules can currently have to kinds of implementation: Theano or python +** Theano-implemented Flowmodules have a buildfunction, that returns a symbolic theano-expression +** Python-implemented Flowmodules hav a runfunction, that can do anything it wants. 
+ +* Flowmodules delivering output might decide, that a certain output needs more data, and can choose to return None for that output + (the total number of return values still must match the number of outputs they define) + If a Flowgraph receives None as one of its inputs, it is prevented from running, even if it is requested. + + + +""" + +from micropsi_core.nodenet.theano_engine.theano_node import TheanoNode +from theano.tensor.var import TensorVariable + + +class FlowModule(TheanoNode): + + @property + def inputs(self): + return self.definition['inputs'] + + @property + def outputs(self): + return self.definition['outputs'] + + def __init__(self, nodenet, partition, parent_uid, uid, numerictype, parameters={}, inputmap={}, outputmap={}, is_copy_of=False, initialized=False): + super().__init__(nodenet, partition, parent_uid, uid, numerictype, parameters=parameters) + self.definition = nodenet.native_module_definitions[self.type] + self.implementation = self.definition['implementation'] + self.outexpression = None + self.outputmap = {} + self.inputmap = {} + self.is_copy_of = is_copy_of + self._load_functions() + self.is_part_of_active_graph = False + for i in self.definition['inputs']: + self.inputmap[i] = tuple() + for i in self.definition['outputs']: + self.outputmap[i] = set() + + for name in inputmap: + self.inputmap[name] = tuple(inputmap[name]) + for name in outputmap: + for link in outputmap[name]: + self.outputmap[name].add(tuple(link)) + self.__initialized = initialized + + def get_flow_data(self, *args, **kwargs): + inmap = {} + outmap = {} + data = {} + for name in self.inputmap: + inmap[name] = list(self.inputmap[name]) + for name in self.outputmap: + outmap[name] = [] + for link in self.outputmap[name]: + outmap[name].append(list(link)) + data = { + 'flow_module': True, + 'inputmap': inmap, + 'outputmap': outmap, + 'is_copy_of': self.is_copy_of, + # 'initialized': self.__initialized + } + return data + + def is_output_connected(self): + if 
len(self.outputs) == 0: + return False + else: + return len(set.union(*list(self.outputmap.values()))) > 0 + + def is_output_node(self): + """ Returns true if this is an output-node (that is, if it has at least one link at its sub-slot)""" + return len(self.get_slot('sub').get_links()) > 0 + + def is_input_node(self): + """ Returns true if this is an input-node (that is, it either has no inputs, or datasources as inputs)""" + if len(self.inputs) == 0: + return True + else: + return ('worldadapter', 'datasources') in self.inputmap.values() + + def is_requested(self): + """ Returns true if this node receives sub-activation""" + if self.definition.get('is_autogenerated'): + return False + return self.get_slot_activations(slot_type='sub') > 0 + + def set_theta(self, name, val): + """ Set the theta value of the given name """ + if self.is_copy_of: + raise RuntimeError("Shallow copies can not set shared variables") + self._nodenet.set_theta(self.uid, name, val) + + def get_theta(self, name): + """ Get the theta value for the given name """ + if self.is_copy_of: + return self._nodenet.get_theta(self.is_copy_of, name) + return self._nodenet.get_theta(self.uid, name) + + def set_state(self, name, val): + if self.is_copy_of: + raise RuntimeError("Shallow copies can not set states") + super().set_state(name, val) + + def get_state(self, name): + if self.is_copy_of: + return self._nodenet.get_node(self.is_copy_of).get_state(name) + return super().get_state(name) + + def set_parameter(self, name, val): + if self.is_copy_of: + raise RuntimeError("Shallow copies can not set parameters") + super().set_parameter(name, val) + + def get_parameter(self, name): + if self.is_copy_of: + return self._nodenet.get_node(self.is_copy_of).get_parameter(name) + return super().get_parameter(name) + + def clone_parameters(self): + if self.is_copy_of: + return self._nodenet.get_node(self.is_copy_of).clone_parameters() + return super().clone_parameters() + + def set_input(self, input_name, 
source_uid, source_output): + """ Connect a Flowmodule or the worldadapter to the given input of this Flowmodule """ + if input_name not in self.inputs: + raise NameError("Unknown input %s" % input_name) + if self.inputmap.get(input_name): + # hack: Worldadapter-flownodes can have multiple inputs + if not self.definition.get('is_autogenerated'): + raise RuntimeError("This input is already connected") + self.inputmap[input_name] = (source_uid, source_output) + + def unset_input(self, input_name): + """ Disconnect a Flowmodule or the worldadapter from the given input of this Flowmodule """ + if input_name not in self.inputs: + raise NameError("Unknown input %s" % input_name) + self.inputmap[input_name] = tuple() + + def set_output(self, output_name, target_uid, target_input): + """ Connect a Flowmodule or the worldadapter to the given output of this Flowmodule """ + self.outputmap[output_name].add((target_uid, target_input)) + + def unset_output(self, output_name, target_uid, target_input): + """ Connect a Flowmodule or the worldadapter from the given output of this Flowmodule """ + self.outputmap[output_name].discard((target_uid, target_input)) + + def node_function(self): + """ activates the sur gate if this Flowmodule is part of an active graph """ + if not self.definition.get('is_autogenerated'): + self.get_gate('sur').gate_function(1 if self.is_part_of_active_graph else 0) + + def ensure_initialized(self): + if not self.__initialized and not self.is_copy_of: + self._initfunction(self._nodenet.netapi, self, self.clone_parameters()) + self.__initialized = True + + def build(self, *inputs): + """ Builds the node, calls the initfunction if needed, and returns an outexpression. 
+ This can be either a symbolic theano expression or a python function """ + if self.is_copy_of: + self._nodenet.get_node(self.is_copy_of).ensure_initialized() + self.ensure_initialized() + if self.implementation == 'theano': + outexpression = self._buildfunction(*inputs, netapi=self._nodenet.netapi, node=self, parameters=self.clone_parameters()) + + # add names to the theano expressions returned by the build function. + # names are added if we received a single expression OR exactly one per documented output, + # but not for lists of expressions (which may have arbitrary many items). + name_outexs = outexpression + if len(self.outputs) == 1: + name_outexs = [outexpression] + for out_idx, subexpression in enumerate(name_outexs): + if isinstance(subexpression, TensorVariable): + existing_name = "({})".format(subexpression.name) if subexpression.name is not None else "" + subexpression.name = "{}_{}{}".format(self.uid, self.outputs[out_idx], existing_name) + + elif self.implementation == 'python': + outexpression = self._flowfunction + + else: + raise ValueError("Unknown flow-implementation: %s" % self.implementation) + + self.outexpression = outexpression + + return outexpression + + def _load_functions(self): + """ Loads the run-/build-/init-functions """ + import os + import inspect + from importlib.machinery import SourceFileLoader + if self.definition.get('is_autogenerated'): + self.__initialized = True + self._initfunction = lambda x, y, z: None + self._flowfunction = self.worldadapter_flowfunction + else: + sourcefile = self.definition['path'] + modulename = 'nodetypes.' 
+ self.definition['category'].replace('/', '.') + os.path.basename(sourcefile)[:-3] + module = SourceFileLoader(modulename, sourcefile).load_module() + + if self.definition.get('init_function_name'): + self._initfunction = getattr(module, self.definition['init_function_name']) + self.__initialized = False + else: + self._initfunction = lambda x, y, z: None + self.__initialized = True + + if self.implementation == 'theano': + self._buildfunction = getattr(module, self.definition['build_function_name']) + self.line_number = inspect.getsourcelines(self._buildfunction)[1] + elif self.implementation == 'python': + self._flowfunction = getattr(module, self.definition['run_function_name']) + self.line_number = inspect.getsourcelines(self._flowfunction)[1] + + def worldadapter_flowfunction(self, *args, **kwargs): + if len(self.outputs) == 1: + returnvalue = self._nodenet.worldadapter_instance.get_flow_datasource(self.outputs[0]) + else: + returnvalue = [] + for key in self.outputs: + returnvalue.append(self._nodenet.worldadapter_instance.get_flow_datasource(key)) + returnvalue = tuple(returnvalue) + return returnvalue diff --git a/micropsi_core/nodenet/theano_engine/theano_link.py b/micropsi_core/nodenet/theano_engine/theano_link.py index 09167fec..fa9f5cf9 100644 --- a/micropsi_core/nodenet/theano_engine/theano_link.py +++ b/micropsi_core/nodenet/theano_engine/theano_link.py @@ -39,16 +39,19 @@ def weight(self): inlinks = target_partition.inlinks[source_partition.spid] from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = inlinks[2].get_value(borrow=True) + target_element = target_partition.allocated_node_offsets[node_from_id(self.__target_node_uid)] + nst source_element = source_partition.allocated_node_offsets[node_from_id(self.__source_node_uid)] + ngt + y = np.where(from_elements == source_element)[0][0] x = np.where(to_elements == target_element)[0][0] - return float(weights[x][y]) - @property - def 
certainty(self): - return 1 + inlink_type = inlinks[4] + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + return float(weights[x][y]) + elif inlink_type == "identity": + return 1. if x == y else 0. @property def source_node(self): diff --git a/micropsi_core/nodenet/theano_engine/theano_netapi.py b/micropsi_core/nodenet/theano_engine/theano_netapi.py index 2d200c41..0752c129 100644 --- a/micropsi_core/nodenet/theano_engine/theano_netapi.py +++ b/micropsi_core/nodenet/theano_engine/theano_netapi.py @@ -2,12 +2,12 @@ from micropsi_core.nodenet.netapi import NetAPI +from contextlib import contextmanager + class TheanoNetAPI(NetAPI): - """ - Theano / numpy extension of the NetAPI, giving native modules access to bulk operations and efficient - data structures for machine learning purposes. - """ + # Theano / numpy extension of the NetAPI, giving native modules access to bulk operations and efficient + # data structures for machine learning purposes. def __init__(self, nodenet): super(TheanoNetAPI, self).__init__(nodenet) @@ -15,9 +15,24 @@ def __init__(self, nodenet): @property def floatX(self): + """ configured numpy float datatype (either numpy.float32 or numpy.float64""" return self.__nodenet.numpyfloatX + @property + @contextmanager + def flowbuilder(self): + """ Contextmanager to prevent the nodenet from compiling flow-graphs. 
Will compile when the context is left: + Usage: + with netapi.flowbuilder: + # create & connect flow modules + nodenet.step() """ + self.__nodenet.is_flowbuilder_active = True + yield + self.__nodenet.is_flowbuilder_active = False + self.__nodenet.update_flow_graphs() + def announce_nodes(self, nodespace_uid, numer_of_nodes, average_element_per_node): + """ announce a new number of nodes and grow the internal matrices before adding the nodes """ self.__nodenet.announce_nodes(nodespace_uid, numer_of_nodes, average_element_per_node) def decay_por_links(self, nodespace_uid): @@ -45,3 +60,64 @@ def decay_por_links(self, nodespace_uid): w_update *= (1 - porretdecay) w[rows, cols] = w_update partition.w.set_value(w, borrow=True) + + def add_gate_activation_recorder(self, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + return self.__nodenet.add_gate_activation_recorder(group_definition, name, interval) + + def add_node_activation_recorder(self, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + return self.__nodenet.add_node_activation_recorder(group_definition, name, interval) + + def add_linkweight_recorder(self, from_group_definition, to_group_definition, name, interval=1): + """ Adds a linkweight recorder to links between to groups.""" + return self.__nodenet.add_linkweight_recorder(from_group_definition, to_group_definition, name, interval) + + def get_recorder(self, uid): + """Returns the recorder with the given uid""" + return self.__nodenet.get_recorder(uid) + + def remove_recorder(self, uid): + """Removes the recorder with the given uid""" + return self.__nodenet.remove_recorder(uid) + + def group_node_gates(self, node_uid, gate_prefix, group_name=None): + """ Creates a group of the high-dimensional gates of the given node""" + self.__nodenet.group_highdimensional_elements(node_uid, gate=gate_prefix, group_name=group_name) + + def group_node_slots(self, node_uid, 
slot_prefix, group_name=None): + """ Creates a group of the high-dimensional slots of the given node""" + self.__nodenet.group_highdimensional_elements(node_uid, slot=slot_prefix, group_name=group_name) + + def flow(self, source_node, source_output, target_node, target_input): + """ Create flow between flowmodules. Use "worldadapter" and "datasources"/"datatargets" to create flow + to the worldadapter """ + source = source_node if source_node == 'worldadapter' else source_node.uid + target = target_node if target_node == 'worldadapter' else target_node.uid + return self.__nodenet.flow(source, source_output, target, target_input) + + def unflow(self, source_node, source_output, target_node, target_input): + """ Remove flow between the given flow_modules """ + source = source_node if source_node == 'worldadapter' else source_node.uid + target = target_node if target_node == 'worldadapter' else target_node.uid + return self.__nodenet.unflow(source, source_output, target, target_input) + + def get_callable_flowgraph(self, nodes, requested_outputs=None, use_different_thetas=False, use_unique_input_names=False): + """ Returns one callable for the given flow_modules. 
+ Parameters: + use_different_thetas (default: False) - Return a callable that excepts a parameter "thetas" that will be used instead of existing thetas + use_unique_input_names (default: False) - Return a callable that excepts input parameter names as "uid_name" where uid is the node_uid, and name is the input_name + requested_outputs (default:None) - Optional list of (node_uid, outputname) tuples, so that the callable will return only the given outputs + """ + func, dangling_inputs, dangling_outputs = self.__nodenet.compile_flow_subgraph([n.uid for n in nodes], requested_outputs=requested_outputs, use_different_thetas=use_different_thetas, use_unique_input_names=use_unique_input_names) + return func + + def collect_thetas(self, nodes): + """ Returns a list of thetas, sorted by node first, alphabetically second """ + return self.__nodenet.collect_thetas([n.uid for n in nodes]) + + def shadow_flowgraph(self, flow_modules): + """ Creates a shallow copy of the given flow_modules, copying instances and internal connections. 
+ Shallow copies will always have the parameters and shared variables of their originals + """ + return self.__nodenet.shadow_flowgraph(flow_modules) diff --git a/micropsi_core/nodenet/theano_engine/theano_node.py b/micropsi_core/nodenet/theano_engine/theano_node.py index b4192a03..f1050ca9 100644 --- a/micropsi_core/nodenet/theano_engine/theano_node.py +++ b/micropsi_core/nodenet/theano_engine/theano_node.py @@ -1,10 +1,12 @@ # -*- coding: utf-8 -*- -from micropsi_core.nodenet.node import Node, Gate, Slot +import os +import numpy as np + +from micropsi_core.nodenet.node import Node, Gate, Slot, HighdimensionalNodetype from micropsi_core.nodenet.theano_engine.theano_link import TheanoLink from micropsi_core.nodenet.theano_engine.theano_stepoperators import * from micropsi_core.nodenet.theano_engine.theano_definitions import * -import numpy as np class TheanoNode(Node): @@ -12,13 +14,12 @@ class TheanoNode(Node): theano node proxy class """ - def __init__(self, nodenet, partition, parent_uid, uid, type, parameters={}, **_): + def __init__(self, nodenet, partition, parent_uid, uid, numerictype, parameters={}, **_): - self._numerictype = type + self._numerictype = numerictype self._id = node_from_id(uid) self._uid = uid self._parent_id = nodespace_from_id(parent_uid) - self._nodenet = nodenet self._partition = partition self._state = {} @@ -26,13 +27,17 @@ def __init__(self, nodenet, partition, parent_uid, uid, type, parameters={}, **_ self.__slotcache = {} self.parameters = None + strtype = get_string_node_type(numerictype, nodenet.native_modules) - strtype = get_string_node_type(type, nodenet.native_modules) + Node.__init__(self, nodenet, strtype, nodenet.get_nodetype(strtype)) - Node.__init__(self, strtype, nodenet.get_nodetype(strtype)) + self.is_highdimensional = type(self._nodetype) == HighdimensionalNodetype + + self.datafile = os.path.join(nodenet.persistency_path, '%s_node_%s.npz' % (self._nodenet.uid, self.uid)) if strtype in nodenet.native_modules or 
strtype == "Comment": self.slot_activation_snapshot = {} + self.take_slot_activation_snapshot() self._state = {} if parameters is not None: @@ -40,6 +45,9 @@ def __init__(self, nodenet, partition, parent_uid, uid, type, parameters={}, **_ else: self.parameters = {} + if self.is_highdimensional: + self.slot_fat_snapshot = None + @property def uid(self): return self._uid @@ -90,92 +98,89 @@ def parent_nodespace(self): def activation(self): return float(self._partition.a.get_value(borrow=True)[self._partition.allocated_node_offsets[self._id] + GEN]) - @property - def activations(self): - return {"default": self.activation} - @activation.setter def activation(self, activation): a_array = self._partition.a.get_value(borrow=True) a_array[self._partition.allocated_node_offsets[self._id] + GEN] = activation self._partition.a.set_value(a_array, borrow=True) + def get_data(self, complete=False, include_links=True): + nspace = { + self._partition.spid: [self._parent_id] + } + data = self._partition.get_node_data(nodespaces_by_partition=nspace, ids=[self._id], complete=complete, include_links=include_links)[0][self.uid] + return data + def get_gate(self, type): if type not in self.__gatecache: + if type not in self.get_gate_types(): + return None self.__gatecache[type] = TheanoGate(type, self, self._nodenet, self._partition) return self.__gatecache[type] - def set_gatefunction_name(self, gate_type, gatefunction_name): - self._nodenet.set_node_gatefunction_name(self.uid, gate_type, gatefunction_name) + # def get_gatefunction_names(self): + # result = {} + # g_function_selector = self._partition.g_function_selector.get_value(borrow=True) + # for numericalgate in range(0, get_gates_per_type(self._numerictype, self._nodenet.native_modules)): + # result[get_string_gate_type(numericalgate, self.nodetype)] = \ + # get_string_gatefunction_type(g_function_selector[self._partition.allocated_node_offsets[self._id] + numericalgate]) + # return result - def get_gatefunction_name(self, 
gate_type): + def get_gate_configuration(self, gate_type=None): g_function_selector = self._partition.g_function_selector.get_value(borrow=True) - return get_string_gatefunction_type(g_function_selector[self._partition.allocated_node_offsets[self._id] + get_numerical_gate_type(gate_type, self.nodetype)]) - - def get_gatefunction_names(self): - result = {} - g_function_selector = self._partition.g_function_selector.get_value(borrow=True) - for numericalgate in range(0, get_gates_per_type(self._numerictype, self._nodenet.native_modules)): - result[get_string_gate_type(numericalgate, self.nodetype)] = \ - get_string_gatefunction_type(g_function_selector[self._partition.allocated_node_offsets[self._id] + numericalgate]) - return result - - def set_gate_parameter(self, gate_type, parameter, value): - self._nodenet.set_node_gate_parameter(self.uid, gate_type, parameter, value) - - def get_gate_parameters(self): - return self.clone_non_default_gate_parameters() - - def clone_non_default_gate_parameters(self, gate_type=None): - g_threshold_array = self._partition.g_threshold.get_value(borrow=True) - g_amplification_array = self._partition.g_amplification.get_value(borrow=True) - g_min_array = self._partition.g_min.get_value(borrow=True) - g_max_array = self._partition.g_max.get_value(borrow=True) - g_theta = self._partition.g_theta.get_value(borrow=True) - - gatemap = {} - gate_types = self.nodetype.gate_defaults.keys() - if gate_type is not None: - if gate_type in gate_types: - gate_types = [gate_type] - else: - return None - - for gate_type in gate_types: - numericalgate = get_numerical_gate_type(gate_type, self.nodetype) - gate_parameters = {} - - threshold = g_threshold_array[self._partition.allocated_node_offsets[self._id] + numericalgate].item() - if 'threshold' not in self.nodetype.gate_defaults[gate_type] or threshold != self.nodetype.gate_defaults[gate_type]['threshold']: - gate_parameters['threshold'] = threshold - - amplification = 
g_amplification_array[self._partition.allocated_node_offsets[self._id] + numericalgate].item() - if 'amplification' not in self.nodetype.gate_defaults[gate_type] or amplification != self.nodetype.gate_defaults[gate_type]['amplification']: - gate_parameters['amplification'] = amplification - - minimum = g_min_array[self._partition.allocated_node_offsets[self._id] + numericalgate].item() - if 'minimum' not in self.nodetype.gate_defaults[gate_type] or minimum != self.nodetype.gate_defaults[gate_type]['minimum']: - gate_parameters['minimum'] = minimum - - maximum = g_max_array[self._partition.allocated_node_offsets[self._id] + numericalgate].item() - if 'maximum' not in self.nodetype.gate_defaults[gate_type] or maximum != self.nodetype.gate_defaults[gate_type]['maximum']: - gate_parameters['maximum'] = maximum - - theta = g_theta[self._partition.allocated_node_offsets[self._id] + numericalgate].item() - if 'theta' not in self.nodetype.gate_defaults[gate_type] or theta != self.nodetype.gate_defaults[gate_type]['theta']: - gate_parameters['theta'] = theta - - if not len(gate_parameters) == 0: - gatemap[gate_type] = gate_parameters + offset = self._partition.allocated_node_offsets[self._id] + indexes = [] + gate_types = self.get_gate_types() + if gate_type is None: + indexes = [offset + get_numerical_gate_type(gate, self.nodetype) for gate in gate_types] + else: + indexes = [offset + get_numerical_gate_type(gate_type, self.nodetype)] + + data = {} + for i, elementindex in enumerate(indexes): + gfunc = g_function_selector[elementindex] + if gfunc != GATE_FUNCTION_IDENTITY: + data[gate_types[i]] = { + 'gatefunction': get_string_gatefunction_type(gfunc), + 'gatefunction_parameters': {} + } + if gfunc == GATE_FUNCTION_SIGMOID or gfunc == GATE_FUNCTION_ELU or gfunc == GATE_FUNCTION_RELU: + g_bias = self._partition.g_bias.get_value(borrow=True) + data[gate_types[i]]['gatefunction_parameters'] = {'bias': g_bias[elementindex]} + elif gfunc == GATE_FUNCTION_THRESHOLD: + g_min = 
self._partition.g_min.get_value(borrow=True) + g_max = self._partition.g_max.get_value(borrow=True) + g_amplification = self._partition.g_amplification.get_value(borrow=True) + g_threshold = self._partition.g_threshold.get_value(borrow=True) + data[gate_types[i]]['gatefunction_parameters'] = { + 'minimum': g_min[elementindex], + 'maximum': g_max[elementindex], + 'amplification': g_amplification[elementindex], + 'threshold': g_threshold[elementindex] + } + + if gate_type is None: + return data + else: + return data[gate_type] - return gatemap + def set_gate_configuration(self, gate_type, gatefunction, gatefunction_parameters={}): + elementindex = self._partition.allocated_node_offsets[self._id] + get_numerical_gate_type(gate_type, self.nodetype) + self._partition._set_gate_config_for_elements([elementindex], gatefunction) + for param, value in gatefunction_parameters.items(): + self._partition._set_gate_config_for_elements([elementindex], gatefunction, param, [value]) def take_slot_activation_snapshot(self): a_array = self._partition.a.get_value(borrow=True) self.slot_activation_snapshot.clear() - for slottype in self.nodetype.slottypes: - self.slot_activation_snapshot[slottype] = \ - a_array[self._partition.allocated_node_offsets[self._id] + get_numerical_slot_type(slottype, self.nodetype)] + if self.is_highdimensional: + start = self._partition.allocated_node_offsets[self._id] + end = start + len(self._nodetype.slottypes) + self.slot_fat_snapshot = np.array(a_array[start:end]) + else: + for slottype in self.nodetype.slottypes: + self.slot_activation_snapshot[slottype] = \ + a_array[self._partition.allocated_node_offsets[self._id] + get_numerical_slot_type(slottype, self.nodetype)] def get_slot(self, type): if type not in self.__slotcache: @@ -205,12 +210,18 @@ def get_associated_node_uids(self): element = self._partition.allocated_node_offsets[self._id] + numeric_slot from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) 
- weights = inlinks[2].get_value(borrow=True) if element in to_elements: + inlink_type = inlinks[4] from_partition = self._nodenet.partitions[partition_from_spid] element_index = np.where(to_elements == element)[0][0] - slotrow = weights[element_index] - links_indices = np.nonzero(slotrow)[0] + + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + slotrow = weights[element_index] + links_indices = np.nonzero(slotrow)[0] + elif inlink_type == "identity": + links_indices = [element_index] + for link_index in links_indices: source_id = from_partition.allocated_elements_to_nodes[from_elements[link_index]] ids.append(node_to_id(source_id, from_partition.pid)) @@ -223,11 +234,18 @@ def get_associated_node_uids(self): inlinks = to_partition.inlinks[self._partition.spid] from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = inlinks[2].get_value(borrow=True) + inlink_type = inlinks[4] + if element in from_elements: element_index = np.where(from_elements == element)[0][0] - gatecolumn = weights[:, element_index] - links_indices = np.nonzero(gatecolumn)[0] + + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + gatecolumn = weights[:, element_index] + links_indices = np.nonzero(gatecolumn)[0] + elif inlink_type == "identity": + links_indices = [element_index] + for link_index in links_indices: target_id = to_partition.allocated_elements_to_nodes[to_elements[link_index]] ids.append(node_to_id(target_id, to_partition.pid)) @@ -252,7 +270,7 @@ def set_parameter(self, parameter, value): sensor_element = self._partition.allocated_node_offsets[self._id] + GEN old_datasource_index = np.where(self._partition.sensor_indices == sensor_element)[0] - self._partition.sensor_indices[old_datasource_index] = 0 + self._partition.sensor_indices[old_datasource_index] = -1 if value not in datasources: self.logger.warning("Datasource %s not known, will not be assigned." 
% value) return @@ -269,12 +287,16 @@ def set_parameter(self, parameter, value): self._nodenet.sensormap[value] = self.uid self._partition.sensor_indices[datasource_index] = sensor_element - elif self.type == "Actor" and parameter == "datatarget": + + if self.name is None or self.name == "" or self.name == self.uid: + self.name = value + + elif self.type == "Actuator" and parameter == "datatarget": if value is not None and value != "": datatargets = self._nodenet.get_datatargets() actuator_element = self._partition.allocated_node_offsets[self._id] + GEN old_datatarget_index = np.where(self._partition.actuator_indices == actuator_element)[0] - self._partition.actuator_indices[old_datatarget_index] = 0 + self._partition.actuator_indices[old_datatarget_index] = -1 if value not in datatargets: self.logger.warning("Datatarget %s not known, will not be assigned." % value) return @@ -291,6 +313,10 @@ def set_parameter(self, parameter, value): self._nodenet.actuatormap[value] = self.uid self._partition.actuator_indices[datatarget_index] = actuator_element + + if self.name is None or self.name == "" or self.name == self.uid: + self.name = value + elif self.type == "Activator" and parameter == "type": if value != "sampling": self._nodenet.set_nodespace_gatetype_activator(self.parent_nodespace, value, self.uid) @@ -310,7 +336,10 @@ def set_parameter(self, parameter, value): elif self.type == "Comment" and parameter == "comment": self.parameters[parameter] = value elif self.type in self._nodenet.native_modules: - self.parameters[parameter] = value + if parameter in self.nodetype.parameters: + self.parameters[parameter] = value + else: + raise NameError("Parameter %s not defined for node %s" % (parameter, str(self))) def clear_parameter(self, parameter): if self.type in self._nodenet.native_modules and parameter in self.parameters: @@ -325,7 +354,7 @@ def clone_parameters(self): parameters['datasource'] = None else: parameters['datasource'] = 
self._nodenet.get_datasources()[datasource_index[0]] - elif self.type == "Actor": + elif self.type == "Actuator": actuator_element = self._partition.allocated_node_offsets[self._id] + GEN datatarget_index = np.where(self._partition.actuator_indices == actuator_element)[0] if len(datatarget_index) == 0: @@ -370,7 +399,6 @@ def clone_parameters(self): for parameter in self.parameters: if parameter not in parameters: parameters[parameter] = self.parameters[parameter] - return parameters def get_state(self, state): @@ -387,19 +415,114 @@ def clone_state(self): else: return None - def clone_sheaves(self): - return {"default": dict(uid="default", name="default", activation=self.activation)} # todo: implement sheaves + def _pluck_apart_state(self, state, numpy_elements): + if isinstance(state, dict): + result = dict() + for key, value in state.items(): + result[key] = self._pluck_apart_state(value, numpy_elements) + elif isinstance(state, list): + result = [] + for value in state: + result.append(self._pluck_apart_state(value, numpy_elements)) + elif isinstance(state, tuple): + raise ValueError("Tuples in node states are not supported") + elif isinstance(state, np.ndarray): + result = "__numpyelement__" + str(id(state)) + numpy_elements[result] = state + else: + return state + + return result + + def _put_together_state(self, state, numpy_elements): + if isinstance(state, dict): + result = dict() + for key, value in state.items(): + result[key] = self._put_together_state(value, numpy_elements) + elif isinstance(state, list): + result = [] + for value in state: + result.append(self._put_together_state(value, numpy_elements)) + elif isinstance(state, str) and state.startswith("__numpyelement__"): + result = numpy_elements[state] + else: + return state + + return result + + def get_persistable_state(self): + """ + Returns a tuple of dicts, the first one containing json-serializable state information + and the second one containing numpy elements that should be persisted into 
an npz. + The json-seriazable dict will contain special values that act as keys for the second dict. + This allows to save nested numpy state. + set_persistable_state knows how to unserialize from the returned tuple. + """ + numpy_elements = dict() + json_state = self._pluck_apart_state(self._state, numpy_elements) + + return json_state, numpy_elements + + def set_persistable_state(self, json_state, numpy_elements): + """ + Sets this node's state from a tuple created with get_persistable_state, + essentially nesting numpy objects back into the state dict where it belongs + """ + self._state = self._put_together_state(json_state, numpy_elements) def node_function(self): try: - self.nodetype.nodefunction(netapi=self._nodenet.netapi, node=self, sheaf="default", **self.clone_parameters()) + params = self.clone_parameters() + self.nodetype.nodefunction(netapi=self._nodenet.netapi, node=self, **params) except Exception: self._nodenet.is_active = False if self.nodetype is not None and self.nodetype.nodefunction is None: - self.logger.warn("No nodefunction found for nodetype %s. Node function definition is: %s" % (self.nodetype.name, self.nodetype.nodefunction_definition)) + self.logger.warning("No nodefunction found for nodetype %s. Node function definition is: %s" % (self.nodetype.name, self.nodetype.nodefunction_definition)) else: raise + def get_slot_activations(self, slot_type=None): + """ Returns a numpy array of the slot activations of a highdimensional + native module. 
You can optional give a high-level gatetype to recieve + only activations of an highdimensional slot type """ + if self.is_highdimensional: + if self.slot_fat_snapshot is None: + self.take_slot_activation_snapshot() + if slot_type: + offset = self.nodetype.slotindexes[slot_type] + length = self.nodetype.dimensionality['slots'].get(slot_type, 1) + if length == 1: + return self.slot_fat_snapshot[offset] + else: + return self.slot_fat_snapshot[offset:offset + length] + else: + return self.slot_fat_snapshot + else: + if slot_type is None: + return self.slot_activation_snapshot + else: + return self.slot_activation_snapshot[slot_type] + + def set_gate_activations(self, new_activations): + start = self._partition.allocated_node_offsets[node_from_id(self.uid)] + end = start + len(self._nodetype.gatetypes) + a_array = self._partition.a.get_value(borrow=True) + a_array[start:end] = new_activations + self._partition.a.set_value(a_array, borrow=True) + + def get_gate_activations(self): + start = self._partition.allocated_node_offsets[node_from_id(self.uid)] + end = start + len(self._nodetype.gatetypes) + a_array = self._partition.a.get_value(borrow=True) + return a_array[start:end] + + def save_data(self, data): + np.savez(self.datafile, data=data) + + def load_data(self): + if os.path.isfile(self.datafile): + return np.load(self.datafile)['data'] + class TheanoGate(Gate): """ @@ -430,10 +553,6 @@ def activation(self, value): a_array[self.__partition.allocated_node_offsets[node_from_id(self.__node.uid)] + self.__numerictype] = value self.__partition.a.set_value(a_array, borrow=True) - @property - def activations(self): - return {'default': self.activation} # todo: implement sheaves - def __init__(self, type, node, nodenet, partition): self.__type = type self.__node = node @@ -464,11 +583,16 @@ def get_links(self): inlinks = to_partition.inlinks[self.__partition.spid] from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = 
inlinks[2].get_value(borrow=True) if element in from_elements: element_index = np.where(from_elements == element)[0][0] - gatecolumn = weights[:, element_index] - links_indices = np.nonzero(gatecolumn)[0] + inlink_type = inlinks[4] + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + gatecolumn = weights[:, element_index] + links_indices = np.nonzero(gatecolumn)[0] + elif inlink_type == "identity": + links_indices = [element_index] + for link_index in links_indices: target_id = to_partition.allocated_elements_to_nodes[to_elements[link_index]] target_type = to_partition.allocated_nodes[target_id] @@ -483,22 +607,11 @@ def get_links(self): def invalidate_caches(self): self.__linkcache = None - def get_parameter(self, parameter_name): - gate_parameters = self.__node.nodetype.gate_defaults[self.type] - gate_parameters.update(self.__node.clone_non_default_gate_parameters(self.type)) - return gate_parameters[parameter_name] - - def clone_sheaves(self): - return {"default": dict(uid="default", name="default", activation=self.activation)} # todo: implement sheaves - - def gate_function(self, input_activation, sheaf="default"): + def gate_function(self, input_activation): # in the theano implementation, this will only be called for native module gates, and simply write # the value back to the activation vector for the theano math to take over self.activation = input_activation - def open_sheaf(self, input_activation, sheaf="default"): - pass # todo: implement sheaves - class TheanoSlot(Slot): """ @@ -524,13 +637,7 @@ def empty(self): @property def activation(self): - return self.__node.slot_activation_snapshot[self.__type] - - @property - def activations(self): - return { - "default": self.activation - } + return self.__node.get_slot_activations(self.__type) def __init__(self, type, node, nodenet, partition): self.__type = type @@ -540,7 +647,7 @@ def __init__(self, type, node, nodenet, partition): self.__numerictype = get_numerical_slot_type(type, 
node.nodetype) self.__linkcache = None - def get_activation(self, sheaf="default"): + def get_activation(self): return self.activation def get_links(self): @@ -565,12 +672,17 @@ def get_links(self): for partition_from_spid, inlinks in self.__partition.inlinks.items(): from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = inlinks[2].get_value(borrow=True) if element in to_elements: - from_partition = self.__nodenet.partitions[partition_from_spid] element_index = np.where(to_elements == element)[0][0] - slotrow = weights[element_index] - links_indices = np.nonzero(slotrow)[0] + inlink_type = inlinks[4] + from_partition = self.__nodenet.partitions[partition_from_spid] + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + slotrow = weights[element_index] + links_indices = np.nonzero(slotrow)[0] + elif inlink_type == "identity": + links_indices = [element_index] + for link_index in links_indices: source_id = from_partition.allocated_elements_to_nodes[from_elements[link_index]] source_type = from_partition.allocated_nodes[source_id] @@ -583,4 +695,4 @@ def get_links(self): return self.__linkcache def invalidate_caches(self): - self.__linkcache = None \ No newline at end of file + self.__linkcache = None diff --git a/micropsi_core/nodenet/theano_engine/theano_nodenet.py b/micropsi_core/nodenet/theano_engine/theano_nodenet.py index ded96a22..0aceee9c 100644 --- a/micropsi_core/nodenet/theano_engine/theano_nodenet.py +++ b/micropsi_core/nodenet/theano_engine/theano_nodenet.py @@ -4,17 +4,28 @@ Nodenet definition """ import json +import io import os import copy import math +import theano from theano import tensor as T import numpy as np import scipy +import networkx as nx +try: + import ipdb as pdb +except ImportError: + import pdb + +from micropsi_core.tools import post_mortem +from micropsi_core.tools import OrderedSet from micropsi_core.nodenet import monitor -from micropsi_core.nodenet.nodenet 
import Nodenet -from micropsi_core.nodenet.node import Nodetype +from micropsi_core.nodenet import recorder +from micropsi_core.nodenet.nodenet import Nodenet, NODENET_VERSION +from micropsi_core.nodenet.node import Nodetype, FlowNodetype, HighdimensionalNodetype from micropsi_core.nodenet.stepoperators import DoernerianEmotionalModulators from micropsi_core.nodenet.theano_engine.theano_node import * from micropsi_core.nodenet.theano_engine.theano_definitions import * @@ -22,24 +33,22 @@ from micropsi_core.nodenet.theano_engine.theano_nodespace import * from micropsi_core.nodenet.theano_engine.theano_netapi import TheanoNetAPI from micropsi_core.nodenet.theano_engine.theano_partition import TheanoPartition +from micropsi_core.nodenet.theano_engine.theano_flowmodule import FlowModule from configuration import config as settings STANDARD_NODETYPES = { - "Nodespace": { - "name": "Nodespace" - }, "Comment": { "name": "Comment", "symbol": "#", 'parameters': ['comment'], "shape": "Rectangle" }, - "Register": { - "name": "Register", + "Neuron": { + "name": "Neuron", "slottypes": ["gen"], - "nodefunction_name": "register", + "nodefunction_name": "neuron", "gatetypes": ["gen"] }, "Sensor": { @@ -48,10 +57,10 @@ "nodefunction_name": "sensor", "gatetypes": ["gen"] }, - "Actor": { - "name": "Actor", + "Actuator": { + "name": "Actuator", "parameters": ["datatarget"], - "nodefunction_name": "actor", + "nodefunction_name": "actuator", "slottypes": ["gen"], "gatetypes": ["gen"] }, @@ -60,50 +69,6 @@ "slottypes": ["gen", "por", "ret", "sub", "sur", "cat", "exp"], "nodefunction_name": "pipe", "gatetypes": ["gen", "por", "ret", "sub", "sur", "cat", "exp"], - "gate_defaults": { - "gen": { - "minimum": -1, - "maximum": 1, - "threshold": -1, - "spreadsheaves": 0 - }, - "por": { - "minimum": -1, - "maximum": 1, - "threshold": -1, - "spreadsheaves": 0 - }, - "ret": { - "minimum": -1, - "maximum": 1, - "threshold": -1, - "spreadsheaves": 0 - }, - "sub": { - "minimum": -1, - "maximum": 1, - 
"threshold": -1, - "spreadsheaves": True - }, - "sur": { - "minimum": -1, - "maximum": 1, - "threshold": -1, - "spreadsheaves": 0 - }, - "cat": { - "minimum": -1, - "maximum": 1, - "threshold": -1, - "spreadsheaves": 1 - }, - "exp": { - "minimum": -1, - "maximum": 1, - "threshold": -1, - "spreadsheaves": 0 - } - }, "parameters": ["expectation", "wait"], "parameter_defaults": { "expectation": 1, @@ -123,39 +88,10 @@ "slottypes": ["gen", "por", "gin", "gou", "gfg"], "gatetypes": ["gen", "por", "gin", "gou", "gfg"], "nodefunction_name": "lstm", - "symbol": "◷", - "gate_defaults": { - "gen": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "por": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "gin": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "gou": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - }, - "gfg": { - "minimum": -1000, - "maximum": 1000, - "threshold": -1000 - } - } + "symbol": "◷" } } -NODENET_VERSION = 1 - class TheanoNodenet(Nodenet): """ @@ -172,14 +108,27 @@ def worldadapter_instance(self): @worldadapter_instance.setter def worldadapter_instance(self, _worldadapter_instance): - self._worldadapter_instance = _worldadapter_instance - self._rebuild_sensor_actor_indices() + typechange = True + if self._worldadapter_instance and self.worldadapter == _worldadapter_instance.__class__.__name__: + typechange = False + if self._worldadapter_instance != _worldadapter_instance: + self._worldadapter_instance = _worldadapter_instance + self._rebuild_sensor_actuator_indices() + + flow_io_types = self.generate_worldadapter_flow_types(delete_existing=typechange) + self.native_module_definitions.update(flow_io_types) + for key in flow_io_types: + self.native_modules[key] = FlowNodetype(nodenet=self, **flow_io_types[key]) + self.update_numeric_native_module_types() + self.generate_worldadapter_flow_instances() + if self._worldadapter_instance: + self._worldadapter_instance.nodenet = self 
@property def current_step(self): return self._step - def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=None, native_modules={}, use_modulators=True, worldadapter_instance=None): + def __init__(self, persistency_path, name="", worldadapter="Default", world=None, owner="", uid=None, native_modules={}, use_modulators=True, worldadapter_instance=None, version=None): # map of string uids to positions. Not all nodes necessarily have an entry. self.positions = {} @@ -193,32 +142,34 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No # map of data targets to string node IDs self.actuatormap = {} - super(TheanoNodenet, self).__init__(name, worldadapter, world, owner, uid, use_modulators=use_modulators, worldadapter_instance=worldadapter_instance) + super().__init__(persistency_path, name, worldadapter, world, owner, uid, native_modules=native_modules, use_modulators=use_modulators, worldadapter_instance=worldadapter_instance, version=version) + + self.nodetypes = {} + for type, data in STANDARD_NODETYPES.items(): + self.nodetypes[type] = Nodetype(nodenet=self, **data) precision = settings['theano']['precision'] if precision == "32": T.config.floatX = "float32" + self.theanofloatX = "float32" self.scipyfloatX = scipy.float32 self.numpyfloatX = np.float32 self.byte_per_float = 4 elif precision == "64": T.config.floatX = "float64" + self.theanofloatX = "float64" self.scipyfloatX = scipy.float64 self.numpyfloatX = np.float64 self.byte_per_float = 8 - else: # pragma: no cover - self.logger.warn("Unsupported precision value from configuration: %s, falling back to float64", precision) - T.config.floatX = "float64" - self.scipyfloatX = scipy.float64 - self.numpyfloatX = np.float64 - self.byte_per_float = 8 + else: + raise RuntimeError("Unsupported float precision value") device = T.config.device self.logger.info("Theano configured to use %s", device) if device.startswith("gpu"): self.logger.info("Using CUDA with 
cuda_root=%s and theano_flags=%s", os.environ["CUDA_ROOT"], os.environ["THEANO_FLAGS"]) if T.config.floatX != "float32": - self.logger.warn("Precision set to %s, but attempting to use gpu.", precision) + self.logger.warning("Precision set to %s, but attempting to use gpu.", precision) self.netapi = TheanoNetAPI(self) @@ -230,14 +181,14 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No try: average_elements_per_node_assumption = int(configured_elements_per_node_assumption) except: # pragma: no cover - self.logger.warn("Unsupported elements_per_node_assumption value from configuration: %s, falling back to 4", configured_elements_per_node_assumption) + self.logger.warning("Unsupported elements_per_node_assumption value from configuration: %s, falling back to 4", configured_elements_per_node_assumption) initial_number_of_nodes = 2000 configured_initial_number_of_nodes = settings['theano']['initial_number_of_nodes'] try: initial_number_of_nodes = int(configured_initial_number_of_nodes) except: # pragma: no cover - self.logger.warn("Unsupported initial_number_of_nodes value from configuration: %s, falling back to 2000", configured_initial_number_of_nodes) + self.logger.warning("Unsupported initial_number_of_nodes value from configuration: %s, falling back to 2000", configured_initial_number_of_nodes) sparse = True configuredsparse = settings['theano']['sparse_weight_matrix'] @@ -246,7 +197,7 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No elif configuredsparse == "False": sparse = False else: # pragma: no cover - self.logger.warn("Unsupported sparse_weight_matrix value from configuration: %s, falling back to True", configuredsparse) + self.logger.warning("Unsupported sparse_weight_matrix value from configuration: %s, falling back to True", configuredsparse) sparse = True rootpartition = TheanoPartition(self, @@ -258,9 +209,8 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", 
uid=No self.rootpartition = rootpartition self.partitionmap = {} self.inverted_partitionmap = {} - self._rebuild_sensor_actor_indices(rootpartition) + self._rebuild_sensor_actuator_indices(rootpartition) - self._version = NODENET_VERSION # used to check compatibility of the node net data self._step = 0 self.proxycache = {} @@ -268,24 +218,32 @@ def __init__(self, name="", worldadapter="Default", world=None, owner="", uid=No self.stepoperators = [] self.initialize_stepoperators() - self._nodetypes = {} - for type, data in STANDARD_NODETYPES.items(): - self._nodetypes[type] = Nodetype(nodenet=self, **data) + self.native_module_definitions = {} + for key in native_modules: + if native_modules[key].get('engine', self.engine) == self.engine: + self.native_module_definitions[key] = native_modules[key] + + self.flow_module_instances = {} + self.flow_graphs = [] + self.thetas = {} - self.native_module_definitions = native_modules - self.native_modules = {} - for type, data in self.native_module_definitions.items(): - self.native_modules[type] = Nodetype(nodenet=self, **data) + flow_io_types = self.generate_worldadapter_flow_types(delete_existing=True) + self.native_module_definitions.update(flow_io_types) + for key in flow_io_types: + self.native_modules[key] = FlowNodetype(nodenet=self, **flow_io_types[key]) - self.create_nodespace(None, None, "Root", nodespace_to_id(1, rootpartition.pid)) + self.flowgraph = nx.MultiDiGraph() + self.is_flowbuilder_active = False + self.flowfunctions = [] + self.worldadapter_flow_nodes = {} + + self.create_nodespace(None, "Root", nodespace_to_id(1, rootpartition.pid)) self.initialize_nodenet({}) def get_data(self, complete=False, include_links=True): data = super().get_data(complete=complete, include_links=include_links) data['nodes'] = self.construct_nodes_dict(complete=complete, include_links=include_links) - # for uid in data['nodes']: - # data['nodes'][uid]['gate_parameters'] = self.get_node(uid).clone_non_default_gate_parameters() 
data['nodespaces'] = self.construct_nodespaces_dict(None, transitive=True) data['version'] = self._version data['modulators'] = self.construct_modulators_dict() @@ -296,7 +254,7 @@ def export_json(self): data['links'] = self.construct_links_list() return data - def get_nodes(self, nodespace_uids=[], include_links=True): + def get_nodes(self, nodespace_uids=[], include_links=True, links_to_nodespaces=[]): """ Returns a dict with contents for the given nodespaces """ @@ -310,122 +268,381 @@ def get_nodes(self, nodespace_uids=[], include_links=True): nodespace_uids = [self.get_nodespace(uid).uid for uid in nodespace_uids] if nodespace_uids: - nodespaces_by_partition = dict((spid, []) for spid in self.partitions) + nodespaces_by_partition = {} for nodespace_uid in nodespace_uids: + spid = self.get_partition(nodespace_uid).spid data['nodespaces'].update(self.construct_nodespaces_dict(nodespace_uid)) - nodespaces_by_partition[self.get_partition(nodespace_uid).spid].append(nodespace_from_id(nodespace_uid)) + if spid not in nodespaces_by_partition: + nodespaces_by_partition[spid] = [] + nodespaces_by_partition[spid].append(nodespace_from_id(nodespace_uid)) + + linked_nodespaces_by_partition = dict((spid, []) for spid in self.partitions) + if links_to_nodespaces: + # group by partition: + for uid in links_to_nodespaces: + spid = self.get_partition(uid).spid + linked_nodespaces_by_partition[spid].append(nodespace_from_id(uid)) - followupuids = [] for spid in nodespaces_by_partition: - if nodespaces_by_partition[spid]: - nodes, followups = self.partitions[spid].get_node_data(nodespace_ids=nodespaces_by_partition[spid], include_links=include_links) - data['nodes'].update(nodes) - followupuids.extend(followups) - - followups_by_partition = dict((spid, []) for spid in self.partitions) - for uid in followupuids: - followups_by_partition[self.get_partition(uid).spid].append(node_from_id(uid)) - - for spid in followups_by_partition: - if followups_by_partition[spid]: - nodes, _ = 
self.partitions[spid].get_node_data(ids=followups_by_partition[spid]) - for uid in nodes: - for gate in list(nodes[uid]['links'].keys()): - links = nodes[uid]['links'][gate] - for idx, l in enumerate(links): - p = self.get_partition(l['target_node_uid']) - if p.allocated_node_parents[node_from_id(l['target_node_uid'])] not in nodespaces_by_partition.get(p.spid, []): - del links[idx] - if len(nodes[uid]['links'][gate]) == 0: - del nodes[uid]['links'][gate] - data['nodes'].update(nodes) + nodes, links, _ = self.partitions[spid].get_node_data(nodespaces_by_partition=nodespaces_by_partition, include_links=include_links, linked_nodespaces_by_partition=linked_nodespaces_by_partition) + data['nodes'].update(nodes) + data['links'] = links else: data['nodespaces'] = self.construct_nodespaces_dict(None, transitive=True) for partition in self.partitions.values(): - nodes, _ = partition.get_node_data(include_links=include_links, include_followupnodes=False) + nodes, _, _ = partition.get_node_data(nodespaces_by_partition=None, include_links=include_links) data['nodes'].update(nodes) return data + def get_links_for_nodes(self, node_uids): + + nodes = {} + links = [] + + def linkid(linkdict): + return "%s:%s:%s:%s" % (linkdict['source_node_uid'], linkdict['source_gate_name'], linkdict['target_slot_name'], linkdict['target_node_uid']) + + innerlinks = {} + for uid in node_uids: + nid = node_from_id(uid) + partition = self.get_partition(uid) + + ntype = partition.allocated_nodes[nid] + nofel = get_elements_per_type(ntype, self.native_modules) + offset = partition.allocated_node_offsets[nid] + + elrange = np.asarray(range(offset, offset + nofel)) + weights = None + + num_nodetype = partition.allocated_nodes[nid] + str_nodetype = get_string_node_type(num_nodetype, self.native_modules) + obj_nodetype = self.get_nodetype(str_nodetype) + + # inner partition links: + w_matrix = partition.w.get_value(borrow=True) + + node_ids = [] + for i, el in enumerate(elrange): + from_els = 
np.nonzero(w_matrix[el, :])[1] + to_els = np.nonzero(w_matrix[:, el])[0] + if len(from_els): + slot_numerical = el - partition.allocated_node_offsets[nid] + slot_type = get_string_slot_type(slot_numerical, obj_nodetype) + if type(obj_nodetype) == HighdimensionalNodetype: + if slot_type.rstrip('0123456789') in obj_nodetype.dimensionality['slots']: + slot_type = slot_type.rstrip('0123456789') + '0' + from_nids = partition.allocated_elements_to_nodes[from_els] + node_ids.extend(from_nids) + + for j, from_el in enumerate(from_els): + source_uid = node_to_id(from_nids[j], partition.pid) + from_nodetype = partition.allocated_nodes[from_nids[j]] + from_obj_nodetype = self.get_nodetype(get_string_node_type(from_nodetype, self.native_modules)) + gate_numerical = from_el - partition.allocated_node_offsets[from_nids[j]] + gate_type = get_string_gate_type(gate_numerical, from_obj_nodetype) + if type(from_obj_nodetype) == HighdimensionalNodetype: + if gate_type.rstrip('0123456789') in from_obj_nodetype.dimensionality['gates']: + gate_type = gate_type.rstrip('0123456789') + '0' + ldict = { + 'source_node_uid': source_uid, + 'source_gate_name': gate_type, + 'target_node_uid': uid, + 'target_slot_name': slot_type, + 'weight': float(w_matrix[el, from_el]) + } + innerlinks[linkid(ldict)] = ldict + + if len(to_els): + gate_numerical = el - partition.allocated_node_offsets[nid] + gate_type = get_string_gate_type(gate_numerical, obj_nodetype) + if type(obj_nodetype) == HighdimensionalNodetype: + if gate_type.rstrip('0123456789') in obj_nodetype.dimensionality['gates']: + gate_type = gate_type.rstrip('0123456789') + '0' + to_nids = partition.allocated_elements_to_nodes[to_els] + node_ids.extend(to_nids) + for j, to_el in enumerate(to_els): + target_uid = node_to_id(to_nids[j], partition.pid) + to_nodetype = partition.allocated_nodes[to_nids[j]] + to_obj_nodetype = self.get_nodetype(get_string_node_type(to_nodetype, self.native_modules)) + slot_numerical = to_el - 
partition.allocated_node_offsets[to_nids[j]] + slot_type = get_string_slot_type(slot_numerical, to_obj_nodetype) + if type(to_obj_nodetype) == HighdimensionalNodetype: + if slot_type.rstrip('0123456789') in to_obj_nodetype.dimensionality['slots']: + slot_type = slot_type.rstrip('0123456789') + '0' + ldict = { + 'source_node_uid': uid, + 'source_gate_name': gate_type, + 'target_node_uid': target_uid, + 'target_slot_name': slot_type, + 'weight': float(w_matrix[to_el, el]) + } + innerlinks[linkid(ldict)] = ldict + + links = list(innerlinks.values()) + nodes.update(partition.get_node_data(ids=[x for x in node_ids if x != nid], include_links=False)[0]) + + # search links originating from this node + for to_partition in self.partitions.values(): + if partition.spid in to_partition.inlinks: + inlinks = to_partition.inlinks[partition.spid] + from_elements = inlinks[0].get_value(borrow=True) + node_gates = np.intersect1d(elrange, from_elements) + if len(node_gates): + to_elements = inlinks[1].get_value(borrow=True) + if inlinks[4] == 'identity': + slots = np.arange(len(from_elements)) + gates = np.arange(len(from_elements)) + weights = 1 + elif inlinks[4] == 'dense': + weights = inlinks[2].get_value(borrow=True) + slots, gates = np.nonzero(weights) + node_ids = set() + for index, gate_index in enumerate(gates): + if from_elements[gate_index] not in elrange: + continue + gate_numerical = from_elements[gate_index] - partition.allocated_node_offsets[nid] + gate_type = get_string_gate_type(gate_numerical, obj_nodetype) + slot_index = slots[index] + target_nid = to_partition.allocated_elements_to_nodes[to_elements[slot_index]] + node_ids.add(target_nid) + to_nodetype = to_partition.allocated_nodes[target_nid] + to_obj_nodetype = self.get_nodetype(get_string_node_type(to_nodetype, self.native_modules)) + slot_numerical = to_elements[slot_index] - to_partition.allocated_node_offsets[target_nid] + slot_type = get_string_slot_type(slot_numerical, to_obj_nodetype) + if 
type(to_obj_nodetype) == HighdimensionalNodetype: + if slot_type.rstrip('0123456789') in to_obj_nodetype.dimensionality['slots']: + slot_type = slot_type.rstrip('0123456789') + '0' + if type(obj_nodetype) == HighdimensionalNodetype: + if gate_type.rstrip('0123456789') in obj_nodetype.dimensionality['gates']: + gate_type = gate_type.rstrip('0123456789') + '0' + + links.append({ + 'source_node_uid': uid, + 'source_gate_name': gate_type, + 'target_node_uid': node_to_id(target_nid, to_partition.pid), + 'target_slot_name': slot_type, + 'weight': 1 if np.isscalar(weights) else float(weights[slot_index, gate_index]) + }) + nodes.update(to_partition.get_node_data(ids=list(node_ids), include_links=False)[0]) + + # search for links terminating at this node + for from_spid in partition.inlinks: + inlinks = partition.inlinks[from_spid] + from_partition = self.partitions[from_spid] + to_elements = inlinks[1].get_value(borrow=True) + node_slots = np.intersect1d(elrange, to_elements) + if len(node_slots): + from_elements = inlinks[0].get_value(borrow=True) + if inlinks[4] == 'identity': + slots = np.arange(len(from_elements)) + gates = np.arange(len(from_elements)) + weights = 1 + elif inlinks[4] == 'dense': + weights = inlinks[2].get_value(borrow=True) + slots, gates = np.nonzero(weights) + node_ids = set() + for index, slot_index in enumerate(slots): + if to_elements[slot_index] not in elrange: + continue + slot_numerical = to_elements[slot_index] - partition.allocated_node_offsets[nid] + slot_type = get_string_slot_type(slot_numerical, obj_nodetype) + gate_index = gates[index] + source_nid = from_partition.allocated_elements_to_nodes[from_elements[gate_index]] + node_ids.add(source_nid) + from_nodetype = from_partition.allocated_nodes[source_nid] + from_obj_nodetype = self.get_nodetype(get_string_node_type(from_nodetype, self.native_modules)) + gate_numerical = from_elements[gate_index] - from_partition.allocated_node_offsets[source_nid] + gate_type = 
get_string_gate_type(gate_numerical, from_obj_nodetype) + if type(from_obj_nodetype) == HighdimensionalNodetype: + if gate_type.rstrip('0123456789') in from_obj_nodetype.dimensionality['gates']: + gate_type = gate_type.rstrip('0123456789') + '0' + if type(obj_nodetype) == HighdimensionalNodetype: + if slot_type.rstrip('0123456789') in obj_nodetype.dimensionality['slots']: + slot_type = slot_type.rstrip('0123456789') + '0' + + links.append({ + 'source_node_uid': node_to_id(source_nid, from_partition.pid), + 'source_gate_name': gate_type, + 'target_node_uid': uid, + 'target_slot_name': slot_type, + 'weight': 1 if np.isscalar(weights) and weights == 1 else float(weights[slot_index, gate_index]) + }) + + nodes.update(from_partition.get_node_data(ids=list(node_ids), include_links=False)[0]) + + return links, nodes + def initialize_stepoperators(self): self.stepoperators = [ TheanoPropagate(), - TheanoCalculate(self)] + TheanoCalculate(self), + TheanoCalculateFlowmodules(self)] if self.use_modulators: self.stepoperators.append(DoernerianEmotionalModulators()) self.stepoperators.sort(key=lambda op: op.priority) - def save(self, filename): + def save(self, base_path=None, zipfile=None): + if base_path is None: + base_path = self.persistency_path # write json metadata, which will be used by runtime to manage the net - with open(filename, 'w+') as fp: - metadata = self.metadata - metadata['positions'] = self.positions - metadata['names'] = self.names - metadata['actuatormap'] = self.actuatormap - metadata['sensormap'] = self.sensormap - metadata['nodes'] = self.construct_native_modules_and_comments_dict() - metadata['monitors'] = self.construct_monitors_dict() - metadata['modulators'] = self.construct_modulators_dict() - metadata['partition_parents'] = self.inverted_partitionmap - fp.write(json.dumps(metadata, sort_keys=True, indent=4)) + metadata = self.metadata + metadata['positions'] = self.positions + metadata['names'] = self.names + metadata['actuatormap'] = 
self.actuatormap + metadata['sensormap'] = self.sensormap + metadata['nodes'] = self.construct_native_modules_and_comments_dict() + metadata['monitors'] = self.construct_monitors_dict() + metadata['modulators'] = self.construct_modulators_dict() + metadata['partition_parents'] = self.inverted_partitionmap + metadata['recorders'] = self.construct_recorders_dict() + metadata['worldadapter_flow_nodes'] = self.worldadapter_flow_nodes + if zipfile: + zipfile.writestr('nodenet.json', json.dumps(metadata)) + else: + with open(os.path.join(base_path, 'nodenet.json'), 'w+', encoding="utf-8") as fp: + fp.write(json.dumps(metadata, sort_keys=True, indent=4)) + + # write numpy states of native modules + numpy_states = self.construct_native_modules_numpy_state_dict() + for node_uid, states in numpy_states.items(): + if len(states) > 0: + filename = "%s_numpystate.npz" % node_uid + if zipfile: + stream = io.BytesIO() + np.savez(stream, **states) + stream.seek(0) + zipfile.writestr(filename, stream.getvalue()) + else: + np.savez(os.path.join(base_path, filename), **states) + + for node_uid in self.thetas: + # save thetas + data = {} + filename = "%s_thetas.npz" % node_uid + for idx, name in enumerate(self.thetas[node_uid]['names']): + data[name] = self.thetas[node_uid]['variables'][idx].get_value() + if zipfile: + stream = io.BytesIO() + np.savez(stream, **data) + stream.seek(0) + zipfile.writestr(filename, stream.getvalue()) + else: + np.savez(os.path.join(base_path, filename), **data) + + # write graph data + if zipfile: + stream = io.BytesIO() + nx.write_gpickle(self.flowgraph, stream) + stream.seek(0) + zipfile.writestr("flowgraph.pickle", stream.getvalue()) + else: + nx.write_gpickle(self.flowgraph, os.path.join(base_path, "flowgraph.pickle")) + + for recorder_uid in self._recorders: + self._recorders[recorder_uid].save() for partition in self.partitions.values(): - # write bulk data to our own numpy-based file format - datafilename = os.path.join(os.path.dirname(filename), 
self.uid + "-data-" + partition.spid) - partition.save(datafilename) + # save partitions + partition.save(base_path=base_path, zipfile=zipfile) - def load(self, filename): + def load(self): """Load the node net from a file""" - # try to access file + if self._version != NODENET_VERSION: + self.logger.error("Wrong version of nodenet data in nodenet %s, cannot load." % self.uid) + return False + + # try to access file + filename = os.path.join(self.persistency_path, 'nodenet.json') with self.netlock: initfrom = {} if os.path.isfile(filename): try: self.logger.info("Loading nodenet %s metadata from file %s", self.name, filename) - with open(filename) as file: + with open(filename, encoding="utf-8") as file: initfrom.update(json.load(file)) except ValueError: # pragma: no cover - self.logger.warn("Could not read nodenet metadata from file %s", filename) + self.logger.warning("Could not read nodenet metadata from file %s", filename) return False except IOError: # pragma: no cover - self.logger.warn("Could not open nodenet metadata file %s", filename) + self.logger.warning("Could not open nodenet metadata file %s", filename) return False # determine whether we have a complete json dump, or our theano npz partition files: nodes_data = initfrom.get('nodes', {}) + # pop the monitors: + monitors = initfrom.pop('monitors', {}) + # initialize - self.initialize_nodenet(initfrom) + invalid_uids = self.initialize_nodenet(initfrom) + + for uid in invalid_uids: + del nodes_data[uid] for partition in self.partitions.values(): - datafilename = os.path.join(os.path.dirname(filename), self.uid + "-data-" + partition.spid + ".npz") - partition.load_data(datafilename, nodes_data) + partition.load_data(nodes_data, invalid_uids=invalid_uids) for partition in self.partitions.values(): - datafilename = os.path.join(os.path.dirname(filename), self.uid + "-data-" + partition.spid + ".npz") - partition.load_inlinks(datafilename) + partition.load_inlinks() # reloading native modules ensures the 
types in allocated_nodes are up to date # (numerical native module types are runtime dependent and may differ from when allocated_nodes # was saved). + self.worldadapter_flow_nodes = initfrom.get('worldadapter_flow_nodes', {}) self.reload_native_modules(self.native_module_definitions) + # recover numpy states for native modules + for partition in self.partitions.values(): + nodeids = np.where((partition.allocated_nodes > MAX_STD_NODETYPE) | (partition.allocated_nodes == COMMENT))[0] + for node_id in nodeids: + node_uid = node_to_id(node_id, partition.pid) + file = os.path.join(self.persistency_path, '%s_numpystate.npz' % node_uid) + if os.path.isfile(file): + node = self.get_node(node_uid) + numpy_states = np.load(file) + node.set_persistable_state(node._state, numpy_states) + + for monitorid in monitors: + data = monitors[monitorid] + if hasattr(monitor, data['classname']): + mon = getattr(monitor, data['classname'])(self, **data) + self._monitors[mon.uid] = mon + else: + self.logger.warning('unknown classname for monitor: %s (uid:%s) ' % (data['classname'], monitorid)) + + for recorder_uid in initfrom.get('recorders', {}): + data = initfrom['recorders'][recorder_uid] + self._recorders[recorder_uid] = getattr(recorder, data['classname'])(self, **data) + + flowfile = os.path.join(self.persistency_path, 'flowgraph.pickle') + + if os.path.isfile(flowfile): + self.flowgraph = nx.read_gpickle(flowfile) + + for node_uid in nx.topological_sort(self.flowgraph): + if node_uid in self.flow_module_instances: + self.flow_module_instances[node_uid].ensure_initialized() + theta_file = os.path.join(self.persistency_path, "%s_thetas.npz" % node_uid) + if os.path.isfile(theta_file): + data = np.load(theta_file) + for key in data: + self.set_theta(node_uid, key, data[key]) + else: + self._delete_flow_module(node_uid) + + self.update_flow_graphs() + # re-initialize step operators for theano recompile to new shared variables self.initialize_stepoperators() - 
self._rebuild_sensor_actor_indices() + self._rebuild_sensor_actuator_indices() return True - def remove(self, filename): - neighbors = os.listdir(os.path.dirname(filename)) - for neighbor in neighbors: - if neighbor.startswith(self.uid): - os.remove(os.path.join(os.path.dirname(filename), neighbor)) - def initialize_nodenet(self, initfrom): self._modulators.update(initfrom.get("modulators", {})) @@ -435,29 +652,26 @@ def initialize_nodenet(self, initfrom): self._nodespace_ui_properties = initfrom.get('nodespace_ui_properties', {}) + invalid_uids = [] if len(initfrom) != 0: # now merge in all init data (from the persisted file typically) - self.merge_data(initfrom, keep_uids=True, native_module_instances_only=True) + invalid_uids = self.merge_data(initfrom, keep_uids=True, native_module_instances_only=True) if 'names' in initfrom: self.names = initfrom['names'] if 'positions' in initfrom: self.positions = initfrom['positions'] - # compatibility: - for key in self.positions: - if len(self.positions[key]) == 3: - break # already 3d coordinates - self.positions[key] = (self.positions[key] + [0] * 3)[:3] if 'actuatormap' in initfrom: self.actuatormap = initfrom['actuatormap'] if 'sensormap' in initfrom: self.sensormap = initfrom['sensormap'] if 'current_step' in initfrom: self._step = initfrom['current_step'] + return invalid_uids def merge_data(self, nodenet_data, keep_uids=False, native_module_instances_only=False): """merges the nodenet state with the current node net, might have to give new UIDs to some entities""" uidmap = {} - invalid_nodes = [] + invalid_nodes = {} # for dict_engine compatibility uidmap["Root"] = self.rootpartition.rootnodespace_uid @@ -491,6 +705,18 @@ def merge_data(self, nodenet_data, keep_uids=False, native_module_instances_only for nodespace in nodespaces_to_merge: self.merge_nodespace_data(nodespace, nodenet_data['nodespaces'], uidmap, keep_uids) + # make sure rootpartition has enough NoN, NoE + if native_module_instances_only: + non = noe 
= 0 + for uid in nodenet_data.get('nodes', {}): + non += 1 + try: + noe += get_elements_per_type(get_numerical_node_type(nodenet_data['nodes'][uid]['type'], self.native_modules), self.native_modules) + except ValueError: + pass # Unknown nodetype + if non > self.rootpartition.NoN or noe > self.rootpartition.NoE: + self.rootpartition.announce_nodes(non, math.ceil(noe / non)) + # merge in nodes for uid in nodenet_data.get('nodes', {}): data = nodenet_data['nodes'][uid] @@ -499,16 +725,33 @@ def merge_data(self, nodenet_data, keep_uids=False, native_module_instances_only if not keep_uids: parent_uid = uidmap[data['parent_nodespace']] id_to_pass = None - if data['type'] not in self._nodetypes and data['type'] not in self.native_modules: - self.logger.warn("Invalid nodetype %s for node %s" % (data['type'], uid)) - data['parameters'] = { - 'comment': 'There was a %s node here' % data['type'] - } - data['type'] = 'Comment' - del data['gate_parameters'] - invalid_nodes.append(uid) + if data['type'] not in self.nodetypes and data['type'] not in self.native_modules: + self.logger.error("Invalid nodetype %s for node %s" % (data['type'], uid)) + invalid_nodes[uid] = data + continue if native_module_instances_only: - node = TheanoNode(self, self.get_partition(uid), parent_uid, uid, get_numerical_node_type(data['type'], nativemodules=self.native_modules), parameters=data.get('parameters')) + if data.get('flow_module'): + if self.native_module_definitions[data['type']].get('flow_module'): + node = FlowModule( + self, + self.get_partition(uid), + data['parent_nodespace'], + data['uid'], + get_numerical_node_type(data['type'], nativemodules=self.native_modules), + parameters=data.get('parameters', {}), + inputmap=data.get('inputmap', {}), + outputmap=data.get('outputmap', {}), + is_copy_of=data.get('is_copy_of')) + self.flow_module_instances[node.uid] = node + else: + invalid_nodes[uid] = data + continue + else: + if not 
self.native_module_definitions[data['type']].get('flow_module'): + node = TheanoNode(self, self.get_partition(uid), parent_uid, uid, get_numerical_node_type(data['type'], nativemodules=self.native_modules), parameters=data.get('parameters')) + else: + invalid_nodes[uid] = data + continue self.proxycache[node.uid] = node new_uid = node.uid else: @@ -519,13 +762,12 @@ def merge_data(self, nodenet_data, keep_uids=False, native_module_instances_only name=data['name'], uid=id_to_pass, parameters=data.get('parameters'), - gate_parameters=data.get('gate_parameters'), - gate_functions=data.get('gate_functions')) + gate_configuration=data.get('gate_configuration')) uidmap[uid] = new_uid node_proxy = self.get_node(new_uid) - for gatetype in data.get('gate_activations', {}): # todo: implement sheaves + for gatetype in data.get('gate_activations', {}): if gatetype in node_proxy.nodetype.gatetypes: - node_proxy.get_gate(gatetype).activation = data['gate_activations'][gatetype]['default']['activation'] + node_proxy.get_gate(gatetype).activation = data['gate_activations'][gatetype] state = data.get('state', {}) if state is not None: for key, value in state.items(): @@ -533,9 +775,6 @@ def merge_data(self, nodenet_data, keep_uids=False, native_module_instances_only # merge in links links = nodenet_data.get('links', []) - if isinstance(links, dict): - # compatibility - links = links.values() for link in links: if link['source_node_uid'] in invalid_nodes or link['target_node_uid'] in invalid_nodes: continue @@ -558,12 +797,17 @@ def merge_data(self, nodenet_data, keep_uids=False, native_module_instances_only mon = getattr(monitor, data['classname'])(self, **data) self._monitors[mon.uid] = mon else: - self.logger.warn('unknown classname for monitor: %s (uid:%s) ' % (data['classname'], monitorid)) + self.logger.warning('unknown classname for monitor: %s (uid:%s) ' % (data['classname'], monitorid)) else: # Compatibility mode mon = monitor.NodeMonitor(self, name=data['node_name'], 
**data) self._monitors[mon.uid] = mon + for uid in invalid_nodes: + if invalid_nodes[uid].get('flow_module'): + self._delete_flow_module(uid) + return invalid_nodes.keys() + def merge_nodespace_data(self, nodespace_uid, data, uidmap, keep_uids=False): """ merges the given nodespace with the given nodespace data dict @@ -581,7 +825,6 @@ def merge_nodespace_data(self, nodespace_uid, data, uidmap, keep_uids=False): self.merge_nodespace_data(nodespace_to_id(parent_id, partition.pid), data, uidmap, keep_uids) self.create_nodespace( data[nodespace_uid].get('parent_nodespace'), - data[nodespace_uid].get('position'), name=data[nodespace_uid].get('name', 'Root'), uid=nodespace_uid ) @@ -612,6 +855,7 @@ def step(self): break else: del self.deleted_items[i] + self.user_prompt_response = {} def get_partition(self, uid): if uid is None: @@ -623,7 +867,10 @@ def get_node(self, uid): if partition is None: raise KeyError("No node with id %s exists", uid) if uid in partition.native_module_instances: - return partition.native_module_instances[uid] + if uid in self.flow_module_instances: + return self.flow_module_instances[uid] + else: + return partition.native_module_instances[uid] elif uid in partition.comment_instances: return partition.comment_instances[uid] elif uid in self.proxycache: @@ -631,7 +878,12 @@ def get_node(self, uid): elif self.is_node(uid): id = node_from_id(uid) parent_id = partition.allocated_node_parents[id] - node = TheanoNode(self, partition, nodespace_to_id(parent_id, partition.pid), uid, partition.allocated_nodes[id]) + nodetype = get_string_node_type(partition.allocated_nodes[id], self.native_modules) + if type(self.get_nodetype(nodetype)) == FlowNodetype: + node = FlowModule(self, partition, nodespace_to_id(parent_id, partition.pid), uid, partition.allocated_nodes[id]) + self.flow_module_instances[uid] = node + else: + node = TheanoNode(self, partition, nodespace_to_id(parent_id, partition.pid), uid, partition.allocated_nodes[id]) self.proxycache[node.uid] 
= node return node else: @@ -662,7 +914,441 @@ def announce_nodes(self, nodespace_uid, number_of_nodes, average_elements_per_no partition = self.get_partition(nodespace_uid) partition.announce_nodes(number_of_nodes, average_elements_per_node) - def create_node(self, nodetype, nodespace_uid, position, name=None, uid=None, parameters=None, gate_parameters=None, gate_functions=None): + def _create_flow_module(self, node): + self.flowgraph.add_node(node.uid, implementation=node.nodetype.implementation) + + def flow(self, source_uid, source_output, target_uid, target_input): + if source_uid == "worldadapter": + source_uid = self.worldadapter_flow_nodes['datasources'] + if target_uid == "worldadapter": + target_uid = self.worldadapter_flow_nodes['datatargets'] + self.flowgraph.add_edge(source_uid, target_uid, key="%s_%s" % (source_output, target_input)) + self.flow_module_instances[target_uid].set_input(target_input, source_uid, source_output) + self.flow_module_instances[source_uid].set_output(source_output, target_uid, target_input) + self.update_flow_graphs() + + def unflow(self, source_uid, source_output, target_uid, target_input): + if source_uid == "worldadapter": + source_uid = self.worldadapter_flow_nodes['datasources'] + if target_uid == "worldadapter": + target_uid = self.worldadapter_flow_nodes['datatargets'] + self.flowgraph.remove_edge(source_uid, target_uid, key="%s_%s" % (source_output, target_input)) + self.flow_module_instances[target_uid].unset_input(target_input) + self.flow_module_instances[source_uid].unset_output(source_output, target_uid, target_input) + self.update_flow_graphs() + + def _delete_flow_module(self, delete_uid): + if delete_uid in self.flowgraph.nodes(): + self.flowgraph.remove_node(delete_uid) + for uid, module in self.flow_module_instances.items(): + for name in module.inputmap: + if module.inputmap[name]: + source_uid, source_name = module.inputmap[name] + if source_uid == delete_uid: + module.unset_input(name) + for name in 
module.outputmap: + for target_uid, target_name in module.outputmap[name].copy(): + if target_uid == delete_uid: + module.unset_output(name, delete_uid, target_name) + if delete_uid in self.flow_module_instances: + del self.flow_module_instances[delete_uid] + self.update_flow_graphs() + + def update_flow_graphs(self, node_uids=None): + if self.is_flowbuilder_active: + return + self.flowfunctions = [] + startpoints = [] + endpoints = [] + pythonnodes = set() + + toposort = nx.topological_sort(self.flowgraph) + self.flow_toposort = toposort + for uid in toposort: + node = self.flow_module_instances.get(uid) + if node is not None: + if node.implementation == 'python': + pythonnodes.add(uid) + if node.is_input_node(): + startpoints.append(uid) + if node.is_output_node(): + endpoints.append(uid) + + graphs = [] + for enduid in endpoints: + ancestors = nx.ancestors(self.flowgraph, enduid) + node = self.flow_module_instances[enduid] + if ancestors or node.inputs == []: + fullpath = [uid for uid in toposort if uid in ancestors] + [enduid] + path = [] + for uid in reversed(fullpath): + if uid in endpoints and uid != enduid: + continue + path.insert(0, uid) + if path: + graphs.append(path) + + # worldadapter_names = [] + # if self.worldadapter_instance is not None: + # worldadapter_names += self.worldadapter_instance.get_available_flow_datasources() + self.worldadapter_instance.get_available_flow_datatargets() + + flowfunctions = {} + floworder = OrderedSet() + for idx, graph in enumerate(graphs): + # split graph in parts: + # node_uids = [uid for uid in graph if uid not in worldadapter_names] + node_uids = [uid for uid in graph] + nodes = [self.get_node(uid) for uid in node_uids] + paths = self.split_flow_graph_into_implementation_paths(nodes) + for p in paths: + floworder.add(p['hash']) + if p['hash'] not in flowfunctions: + func, dang_in, dang_out = self.compile_flow_subgraph([n.uid for n in p['members']], use_unique_input_names=True) + if func: + flowfunctions[p['hash']] 
= {'callable': func, 'members': p['members'], 'endnodes': set([nodes[-1]]), 'inputs': dang_in, 'outputs': dang_out} + else: + flowfunctions[p['hash']]['endnodes'].add(nodes[-1]) + for funcid in floworder: + self.flowfunctions.append(flowfunctions[funcid]) + + self.logger.debug("Compiled %d flowfunctions" % len(self.flowfunctions)) + + def split_flow_graph_into_implementation_paths(self, nodes): + paths = [] + for node in nodes: + if node.implementation == 'python': + paths.append({'implementation': 'python', 'members': [node], 'hash': node.uid}) + else: + if len(paths) == 0 or paths[-1]['implementation'] == 'python': + paths.append({'implementation': 'theano', 'members': [node], 'hash': node.uid}) + else: + paths[-1]['members'].append(node) + paths[-1]['hash'] += node.uid + + return paths + + def compile_flow_subgraph(self, node_uids, requested_outputs=None, use_different_thetas=False, use_unique_input_names=False): + """ Compile and return one callable for the given flow_module_uids. + If use_different_thetas is True, the callable expects an argument names "thetas". + Thetas are expected to be sorted in the same way collect_thetas() would return them. + + Parameters + ---------- + node_uids : list + the uids of the members of this graph + + requested_outputs : list, optional + list of tuples (node_uid, out_name) to filter the callable's return-values. defaults to None, returning all outputs + + use_different_thetas : boolean, optional + if true, return a callable that excepts a parameter "thetas" that will be used instead of existing thetas. defaults to False + + use_unique_input_names : boolen, optional + if true, the returned callable expects input-kwargs to be prefixe by node_uid: "UID_NAME". 
defaults to False, using only the name of the input + + Returns + ------- + callable : function + the compiled function for this subgraph + + dangling_inputs : list + list of tuples (node_uid, input) that the callable expectes as inputs + + dangling_outputs : list + list of tuples (node_uid, input) that the callable will return as output + + """ + subgraph = [self.get_node(uid) for uid in self.flow_toposort if uid in node_uids] + + # split the nodes into symbolic/non-symbolic paths + paths = self.split_flow_graph_into_implementation_paths(subgraph) + + dangling_inputs = [] + dangling_outputs = [] + + thunks = [] + + for path_idx, path in enumerate(paths): + thunk = { + 'implementation': path['implementation'], + 'function': None, + 'node': None, + 'outputs': [], + 'input_sources': [], + 'dangling_outputs': [], + 'list_outputs': [], + 'members': path['members'] + } + member_uids = [n.uid for n in path['members']] + outexpressions = {} + inputs = [] + outputs = [] + skip = False + + # index for outputs of this thunk, considering unpacked list outputs + thunk_flattened_output_index = 0 + + for node in path['members']: + buildargs = [] + # collect the inputs for this Flowmodule: + for in_idx, in_name in enumerate(node.inputs): + if not node.inputmap[in_name] or node.inputmap[in_name][0] not in member_uids: + # this input is not satisfied from within this path + in_expr = create_tensor(node.definition['inputdims'][in_idx], self.theanofloatX, name="%s_%s" % (node.uid, in_name)) + inputs.append(in_expr) + if not node.inputmap[in_name] or node.inputmap[in_name][0] not in node_uids: + # it's not even satisfied by another path within the subgraph, + # and needs to be provided as input to the emerging callable + if use_unique_input_names: + thunk['input_sources'].append(('kwargs', -1, "%s_%s" % (node.uid, in_name))) + else: + thunk['input_sources'].append(('kwargs', -1, in_name)) + dangling_inputs.append((node.uid, in_name)) + else: + # this input will be satisfied by another 
path within the subgraph + source_uid, source_name = node.inputmap[in_name] + for idx, p in enumerate(paths): + if self.get_node(source_uid) in p['members']: + # record which thunk, and which index of its output-array satisfies this input + thunk['input_sources'].append(('path', idx, thunks[idx]['outputs'].index((source_uid, source_name)))) + buildargs.append(in_expr) + else: + # this input is satisfied within this path + source_uid, source_name = node.inputmap[in_name] + buildargs.append(outexpressions[source_uid][self.get_node(source_uid).outputs.index(source_name)]) + + # build the outexpression + try: + if len(node.outputs) <= 1: + original_outex = [node.build(*buildargs)] + elif node.implementation == 'python': + func = node.build(*buildargs) + original_outex = [func] * len(node.outputs) + else: + original_outex = node.build(*buildargs) + except Exception as err: + import traceback as tb + frame = [f[0] for f in tb.walk_tb(err.__traceback__) if f[0].f_code.co_filename == node.definition.get('path', '')] + lineno = "" if len(frame) == 0 else str(frame[0].f_lineno) + self.logger.error("Error in Flowmodule %s at line %s: %s: %s" % (str(node), lineno, err.__class__.__name__, str(err))) + post_mortem() + skip = True + break + + outexpressions[node.uid] = original_outex + flattened_outex = [] + outputlengths = [] + flattened_markers = [] + # check if this node has a list as one of its return values: + for idx, ex in enumerate(original_outex): + if type(ex) == list: + # if so, flatten the outputs, and mark the offset and length of the flattened output + # so that we can later reconstruct the nested output-structure + flattened_markers.append((len(outputs) + idx, len(ex))) + outputlengths.append(len(ex)) + for item in ex: + flattened_outex.append(item) + else: + flattened_outex.append(ex) + outputlengths.append(1) + + # offset for indexing the flattened_outexpression by output_index + node_flattened_output_offset = 0 + + # go thorugh the nodes outputs, and see how 
they will be used: + for out_idx, out_name in enumerate(node.outputs): + dangling = ['external'] + if node.outputmap[out_name]: + # if this output is used, we have to see where every connection goes + # iterate through every connection, and note if it's used path-internally, + # subgraph-internally, or will produce an output of the emerging callable + dangling = [] + for pair in node.outputmap[out_name]: + if pair[0] in member_uids: + # path-internally satisfied + dangling.append(False) + elif pair[0] in node_uids: + # internal dangling aka subgraph-internally satisfied + dangling.append("internal") + else: + # externally dangling aka this will be a final output + dangling.append("external") + # now, handle internally or externally dangling outputs if there are any: + if set(dangling) != {False}: + thunk['outputs'].append((node.uid, out_name)) + if outputlengths[out_idx] > 1: + # if this is output should produce a list, note this, for later de-flattenation + # and append the flattened output to the output-collection + thunk['list_outputs'].append((thunk_flattened_output_index, outputlengths[out_idx])) + for i in range(outputlengths[out_idx]): + outputs.append(flattened_outex[out_idx + node_flattened_output_offset + i]) + node_flattened_output_offset += outputlengths[out_idx] - 1 + else: + outputs.append(flattened_outex[out_idx + node_flattened_output_offset]) + if "external" in dangling: + # this output will be a final one: + if requested_outputs is None or (node.uid, out_name) in requested_outputs: + dangling_outputs.append((node.uid, out_name)) + thunk['dangling_outputs'].append(thunk_flattened_output_index) + thunk_flattened_output_index += outputlengths[out_idx] + + if skip: + # thunk borked, skip + continue + + # now, set the function of this thunk. Either compile a theano function + # or assign the python function. 
+ if not use_different_thetas: + if thunk['implementation'] == 'theano': + thunk['function'] = theano.function(inputs=inputs, outputs=outputs) + else: + thunk['node'] = path['members'][0] + thunk['function'] = outexpressions[thunk['node'].uid][0] + + else: + sharedvars = self.collect_thetas(node_uids) + dummies = [create_tensor(var.ndim, self.theanofloatX, name="Theta_%s" % var.name) for var in sharedvars] + if thunk['implementation'] == 'theano': + givens = list(zip(sharedvars, dummies)) + thunk['function'] = theano.function(inputs=inputs + dummies, outputs=outputs, givens=givens) + else: + thunk['node'] = path['members'][0] + thunk['function'] = outexpressions[thunk['node'].uid][0] + + thunks.append(thunk) + + if not use_unique_input_names: + # check for name collisions + for thunk in thunks: + if len(set(thunk['input_sources'])) != (len(thunk['input_sources'])): + raise RuntimeError(""" + Name Collision in inputs detected! + This graph can only be compiled as callable if you use unique_input_names. 
+ set use_unique_input_names to True, and give the inputs as "UID_NAME" + where uid is the uid of the node getting this input, and name is the input name of this node""") + + def compiled(thetas=None, **kwargs): + """ Compiled callable for this subgraph """ + all_outputs = [] # outputs for use within this thunk + final_outputs = [] # final, external dangling outputs + for idx, thunk in enumerate(thunks): + funcargs = [] + # get the inputs: Either from the kwargs, or from the already existing outputs + for source, pidx, item in thunk['input_sources']: + if source == 'kwargs': + funcargs.append(kwargs[item]) + elif source == 'path': + funcargs.append(all_outputs[pidx][item]) + if thunk['implementation'] == 'python': + params = thunk['node'].clone_parameters() + out = thunk['function'](*funcargs, netapi=self.netapi, node=thunk['node'], parameters=params) + if len(thunk['node'].outputs) <= 1: + out = [out] + else: + if type(out) != tuple: + raise RuntimeError("""Output mismatch! + Node %s returned only one output instead of %d.""" % (str(thunk['node']), len(thunk['node'].outputs))) + elif len(out) != len(thunk['node'].outputs): + raise RuntimeError("""Output mismatch! + Node %s returned %d outputs instead of %d.""" % (str(thunk['node']), len(out), len(thunk['node'].outputs))) + else: + if thetas: + funcargs += thetas + out = thunk['function'](*funcargs) + if thunk['list_outputs']: + # if we have list_outputs, we need to nest the output of this thunk again + # to recreate the nested structure from a flat list of outputs + new_out = [] + out_iter = iter(out) + try: + for out_index in range(len(out)): + for offset, length in thunk['list_outputs']: + if offset == out_index: + sublist = [] + for i in range(length): + sublist.append(next(out_iter)) + new_out.append(sublist) + else: + new_out.append(next(out_iter)) + except StopIteration: + # iterator finished, we handled all items. 
+ pass + out = new_out + if out: + all_outputs.append(out) + for idx in thunk['dangling_outputs']: + if requested_outputs is None or thunk['outputs'][idx] in requested_outputs: + final_outputs.append(out[idx]) + return final_outputs + + compiled.__doc__ = """Compiled subgraph of nodes %s + Inputs: %s + Outputs: %s + """ % (str(subgraph), str([("%s of %s" % x[::-1]) for x in dangling_inputs]), str([("%s of %s" % x[::-1]) for x in dangling_outputs])) + + return compiled, dangling_inputs, dangling_outputs + + def shadow_flowgraph(self, flow_modules): + """ Creates shallow copies of the given flow_modules, copying instances and internal connections. + Shallow copies will always have the parameters and shared variables of their originals + """ + copies = [] + copymap = {} + for node in flow_modules: + copy_uid = self.create_node( + node.type, + node.parent_nodespace, + node.position, + name=node.name, + parameters=node.clone_parameters()) + copy = self.get_node(copy_uid) + copy.is_copy_of = node.uid + copymap[node.uid] = copy + copies.append(copy) + for node in flow_modules: + for in_name in node.inputmap: + if node.inputmap[in_name]: + source_uid, source_name = node.inputmap[in_name] + if source_uid in copymap: + self.flow(copymap[source_uid].uid, source_name, copymap[node.uid].uid, in_name) + return copies + + def set_theta(self, node_uid, name, val): + if node_uid not in self.thetas: + self.thetas[node_uid] = { + 'names': [], + 'variables': [] + } + if name not in self.thetas[node_uid]['names']: + new_names = sorted(self.thetas[node_uid]['names'] + [name]) + self.thetas[node_uid]['names'] = new_names + index = self.thetas[node_uid]['names'].index(name) + if not isinstance(val, T.sharedvar.TensorSharedVariable): + val = theano.shared(value=val.astype(T.config.floatX), name=name, borrow=True) + self.thetas[node_uid]['variables'].insert(index, val) + else: + if not isinstance(val, T.sharedvar.TensorSharedVariable): + val = 
theano.shared(value=val.astype(T.config.floatX), name=name, borrow=True) + index = self.thetas[node_uid]['names'].index(name) + self.thetas[node_uid]['variables'][index].set_value(val.get_value(), borrow=True) + + def get_theta(self, node_uid, name): + data = self.thetas[node_uid] + index = data['names'].index(name) + return data['variables'][index] + + def collect_thetas(self, node_uids): + shared_vars = [] + for uid in node_uids: + node = self.get_node(uid) + if node.is_copy_of: + uid = node.is_copy_of + data = self.thetas.get(uid) + if data: + shared_vars.extend(data['variables']) + return shared_vars + + def create_node(self, nodetype, nodespace_uid, position, name=None, uid=None, parameters=None, gate_configuration=None): nodespace_uid = self.get_nodespace(nodespace_uid).uid partition = self.get_partition(nodespace_uid) nodespace_id = nodespace_from_id(nodespace_uid) @@ -671,14 +1357,12 @@ def create_node(self, nodetype, nodespace_uid, position, name=None, uid=None, pa if uid is not None: id_to_pass = node_from_id(uid) - id = partition.create_node(nodetype, nodespace_id, id_to_pass, parameters, gate_parameters, gate_functions) + id = partition.create_node(nodetype, nodespace_id, id_to_pass, parameters, gate_configuration) uid = node_to_id(id, partition.pid) if position is not None: position = (position + [0] * 3)[:3] self.positions[uid] = position - if name is not None and name != "" and name != uid: - self.names[uid] = name if parameters is None: parameters = {} @@ -686,14 +1370,24 @@ def create_node(self, nodetype, nodespace_uid, position, name=None, uid=None, pa if nodetype == "Sensor": if 'datasource' in parameters: self.get_node(uid).set_parameter("datasource", parameters['datasource']) - elif nodetype == "Actor": + if name is None or name == "" or name == uid: + name = parameters['datasource'] + elif nodetype == "Actuator": if 'datatarget' in parameters: self.get_node(uid).set_parameter("datatarget", parameters['datatarget']) + if name is None or name == 
"" or name == uid: + name = parameters['datatarget'] + + if nodetype in self.native_modules and type(self.native_modules[nodetype]) == FlowNodetype: + self._create_flow_module(self.get_node(uid)) + + if name is not None and name != "" and name != uid: + self.names[uid] = name return uid def delete_node(self, uid): - + self.close_figures(uid) partition = self.get_partition(uid) node_id = node_from_id(uid) @@ -709,27 +1403,35 @@ def delete_node(self, uid): element = partition.allocated_node_offsets[node_id] + numeric_slot from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = inlinks[2].get_value(borrow=True) + if element in to_elements: - from_partition = self.partitions[partition_from_spid] - element_index = np.where(to_elements == element)[0][0] - slotrow = weights[element_index] - links_indices = np.nonzero(slotrow)[0] - for link_index in links_indices: - source_id = from_partition.allocated_elements_to_nodes[from_elements[link_index]] - associated_uids.append(node_to_id(source_id, from_partition.pid)) - # set all weights for this element to 0 - new_weights = np.delete(weights, element_index, 0) - if len(new_weights) == 0: - # if this was the last link, remove whole inlinks information for this partition pair - del partition.inlinks[partition_from_spid] - break - # find empty columns (elements linking only to this element) - zero_columns = np.where(~new_weights.any(axis=0))[0] - # remove empty columns from weight matrix: - new_weights = np.delete(new_weights, zero_columns, 1) - # save new weight matrix - partition.inlinks[partition_from_spid][2].set_value(new_weights) + inlink_type = inlinks[4] + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + from_partition = self.partitions[partition_from_spid] + element_index = np.where(to_elements == element)[0][0] + slotrow = weights[element_index] + links_indices = np.nonzero(slotrow)[0] + for link_index in links_indices: + source_id = 
from_partition.allocated_elements_to_nodes[from_elements[link_index]] + associated_uids.append(node_to_id(source_id, from_partition.pid)) + # set all weights for this element to 0 + new_weights = np.delete(weights, element_index, 0) + if len(new_weights) == 0: + # if this was the last link, remove whole inlinks information for this partition pair + del partition.inlinks[partition_from_spid] + break + + # find empty columns (elements linking only to this element) + zero_columns = np.where(~new_weights.any(axis=0))[0] + # remove empty columns from weight matrix: + new_weights = np.delete(new_weights, zero_columns, 1) + # save new weight matrix + partition.inlinks[partition_from_spid][2].set_value(new_weights) + elif inlink_type == "identity": + element_index = np.where(to_elements == element)[0][0] + zero_columns = element_index + # remove this element partition.inlinks[partition_from_spid][1].set_value(np.delete(to_elements, element_index)) # remove from_elements @@ -743,26 +1445,35 @@ def delete_node(self, uid): inlinks = to_partition.inlinks[partition.spid] from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = inlinks[2].get_value(borrow=True) + if element in from_elements: - element_index = np.where(from_elements == element)[0][0] - gatecolumn = weights[:, element_index] - links_indices = np.nonzero(gatecolumn)[0] - for link_index in links_indices: - target_id = to_partition.allocated_elements_to_nodes[to_elements[link_index]] - associated_uids.append(node_to_id(target_id, to_partition.pid)) - # set all weights for this element to 0 - new_weights = np.delete(weights, element_index, 1) - if len(new_weights) == 0: - # if this was the last link, remove whole inlinks information for target partition - del to_partition.inlinks[partition.spid] - break - # find empty rows (elements linked only by this node) - zero_rows = np.where(~new_weights.any(axis=1))[0] - # remove empty rows from weight matrix - new_weights = 
np.delete(new_weights, zero_rows, 0) - # save new weights - to_partition.inlinks[partition.spid][2].set_value(new_weights) + inlink_type = inlinks[4] + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + element_index = np.where(from_elements == element)[0][0] + gatecolumn = weights[:, element_index] + links_indices = np.nonzero(gatecolumn)[0] + for link_index in links_indices: + target_id = to_partition.allocated_elements_to_nodes[to_elements[link_index]] + associated_uids.append(node_to_id(target_id, to_partition.pid)) + # set all weights for this element to 0 + new_weights = np.delete(weights, element_index, 1) + if len(new_weights) == 0: + # if this was the last link, remove whole inlinks information for target partition + del to_partition.inlinks[partition.spid] + break + + # find empty rows (elements linked only by this node) + zero_rows = np.where(~new_weights.any(axis=1))[0] + # remove empty rows from weight matrix + new_weights = np.delete(new_weights, zero_rows, 0) + # save new weights + to_partition.inlinks[partition.spid][2].set_value(new_weights) + + elif inlink_type == "identity": + element_index = np.where(from_elements == element)[0][0] + zero_columns = element_index + # remove this element to_partition.inlinks[partition.spid][0].set_value(np.delete(from_elements, element_index)) # remove to_elements @@ -770,6 +1481,9 @@ def delete_node(self, uid): partition.delete_node(node_id) + if uid in self.flow_module_instances: + self._delete_flow_module(uid) + # remove sensor association if there should be one if uid in self.sensormap.values(): self.sensormap = {k: v for k, v in self.sensormap.items() if v != uid} @@ -794,16 +1508,6 @@ def delete_node(self, uid): if uid_to_clear in self.proxycache: del self.proxycache[uid_to_clear] - def set_node_gate_parameter(self, uid, gate_type, parameter, value): - partition = self.get_partition(uid) - id = node_from_id(uid) - partition.set_node_gate_parameter(id, gate_type, parameter, value) - - 
def set_node_gatefunction_name(self, uid, gate_type, gatefunction_name): - partition = self.get_partition(uid) - id = node_from_id(uid) - partition.set_node_gatefunction_name(id, gate_type, gatefunction_name) - def set_nodespace_gatetype_activator(self, nodespace_uid, gate_type, activator_uid): partition = self.get_partition(nodespace_uid) activator_id = 0 @@ -846,7 +1550,7 @@ def get_nodespace_uids(self): def is_nodespace(self, uid): return uid in self.get_nodespace_uids() - def set_entity_positions(self, positions): + def set_node_positions(self, positions): for uid in positions: pos = (positions[uid] + [0] * 3)[:3] self.positions[uid] = pos @@ -870,7 +1574,7 @@ def create_partition(self, pid, parent_uid, sparse, initial_number_of_nodes, ave self.partitionmap[parent_uid] = [] self.partitionmap[parent_uid].append(partition) self.inverted_partitionmap[partition.spid] = parent_uid - self._rebuild_sensor_actor_indices(partition) + self._rebuild_sensor_actuator_indices(partition) return partition.spid def delete_partition(self, pid): @@ -896,7 +1600,7 @@ def delete_partition(self, pid): for s in node.get_slot_types(): node.get_slot(s).invalidate_caches() - def create_nodespace(self, parent_uid, position, name="", uid=None, options=None): + def create_nodespace(self, parent_uid, name="", uid=None, options=None): if options is None: options = {} new_partition = options.get('new_partition', False) @@ -927,7 +1631,7 @@ def create_nodespace(self, parent_uid, position, name="", uid=None, options=None try: average_elements_per_node_assumption = int(configured_elements_per_node_assumption) except: - self.logger.warn("Unsupported elements_per_node_assumption value from configuration: %s, falling back to 4", configured_elements_per_node_assumption) # pragma: no cover + self.logger.warning("Unsupported elements_per_node_assumption value from configuration: %s, falling back to 4", configured_elements_per_node_assumption) # pragma: no cover initial_number_of_nodes = 2000 if 
"initial_number_of_nodes" in options: @@ -937,7 +1641,7 @@ def create_nodespace(self, parent_uid, position, name="", uid=None, options=None try: initial_number_of_nodes = int(configured_initial_number_of_nodes) except: - self.logger.warn("Unsupported initial_number_of_nodes value from configuration: %s, falling back to 2000", configured_initial_number_of_nodes) # pragma: no cover + self.logger.warning("Unsupported initial_number_of_nodes value from configuration: %s, falling back to 2000", configured_initial_number_of_nodes) # pragma: no cover sparse = True if "sparse" in options: @@ -949,7 +1653,7 @@ def create_nodespace(self, parent_uid, position, name="", uid=None, options=None elif configuredsparse == "False": sparse = False else: - self.logger.warn("Unsupported sparse_weight_matrix value from configuration: %s, falling back to True", configuredsparse) # pragma: no cover + self.logger.warning("Unsupported sparse_weight_matrix value from configuration: %s, falling back to True", configuredsparse) # pragma: no cover sparse = True self.last_allocated_partition += 1 @@ -969,9 +1673,6 @@ def create_nodespace(self, parent_uid, position, name="", uid=None, options=None if name is not None and len(name) > 0 and name != uid: self.names[uid] = name - if position is not None: - position = (position + [0] * 3)[:3] - self.positions[uid] = position return uid @@ -1009,7 +1710,7 @@ def get_sensors(self, nodespace=None, datasource=None): sensors[uid] = self.get_node(uid) return sensors - def get_actors(self, nodespace=None, datatarget=None): + def get_actuators(self, nodespace=None, datatarget=None): actuators = {} actuatorlist = [] if datatarget is None: @@ -1021,10 +1722,13 @@ def get_actors(self, nodespace=None, datatarget=None): actuators[uid] = self.get_node(uid) return actuators - def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): - return self.set_link_weight(source_node_uid, gate_type, target_node_uid, slot_type, 
weight) + def create_link(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): + result = self.set_link_weight(source_node_uid, gate_type, target_node_uid, slot_type, weight) + if target_node_uid in self.flow_module_instances: + self.update_flow_graphs() + return result - def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): + def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): source_partition = self.get_partition(source_node_uid) target_partition = self.get_partition(target_node_uid) @@ -1072,30 +1776,57 @@ def set_link_weight(self, source_node_uid, gate_type, target_node_uid, slot_type return True def delete_link(self, source_node_uid, gate_type, target_node_uid, slot_type): - return self.set_link_weight(source_node_uid, gate_type, target_node_uid, slot_type, 0) + result = self.set_link_weight(source_node_uid, gate_type, target_node_uid, slot_type, 0) + if target_node_uid in self.flow_module_instances: + self.update_flow_graphs() + return result def reload_native_modules(self, native_modules): - self.native_module_definitions = native_modules - # check which instances need to be recreated because of gate/slot changes and keep their .data instances_to_recreate = {} instances_to_delete = {} + + # create the new nodetypes + self.native_module_definitions = {} + newnative_modules = {} + native_modules.update(self.generate_worldadapter_flow_types()) + for key, data in native_modules.items(): + if data.get('engine', self.engine) == self.engine: + try: + if data.get('flow_module'): + newnative_modules[key] = FlowNodetype(nodenet=self, **data) + elif data.get('dimensionality'): + newnative_modules[key] = HighdimensionalNodetype(nodenet=self, **data) + else: + newnative_modules[key] = Nodetype(nodenet=self, **data) + self.native_module_definitions[key] = data + except Exception as err: + self.logger.error("Can not instantiate node type %s: %s: %s" % (key, 
err.__class__.__name__, str(err))) + post_mortem() + for partition in self.partitions.values(): for uid, instance in partition.native_module_instances.items(): - if instance.type not in native_modules: - self.logger.warn("No more definition available for node type %s, deleting instance %s" % - (instance.type, uid)) + if instance.type not in newnative_modules: + self.logger.warning("No more definition available for node type %s, deleting instance %s" % + (instance.type, uid)) instances_to_delete[uid] = instance continue numeric_id = node_from_id(uid) - number_of_elements = len(np.where(partition.allocated_elements_to_nodes == numeric_id)[0]) - new_numer_of_elements = max(len(native_modules[instance.type].get('slottypes', [])), len(native_modules[instance.type].get('gatetypes', []))) - if number_of_elements != new_numer_of_elements: - self.logger.warn("Number of elements changed for node type %s from %d to %d, recreating instance %s" % - (instance.type, number_of_elements, new_numer_of_elements, uid)) - instances_to_recreate[uid] = instance.get_data(complete=True, include_links=False) + if uid in self.flow_module_instances: + if newnative_modules[instance.type].inputs != instance.inputs or newnative_modules[instance.type].outputs != instance.outputs: + self.logger.warning("Inputs or Outputs of flow node type %s changed, recreating instance %s" % + (instance.type, uid)) + instances_to_recreate[uid] = instance.get_data(complete=True, include_links=False) + + else: + number_of_elements = len(np.where(partition.allocated_elements_to_nodes == numeric_id)[0]) + new_number_of_elements = max(len(newnative_modules[instance.type].slottypes), len(newnative_modules[instance.type].gatetypes)) + if number_of_elements != new_number_of_elements: + self.logger.warning("Number of elements changed for node type %s from %d to %d, recreating instance %s" % + (instance.type, number_of_elements, new_number_of_elements, uid)) + instances_to_recreate[uid] = instance.get_data(complete=True, 
include_links=False) # actually remove the instances for uid in instances_to_delete.keys(): @@ -1103,66 +1834,140 @@ def reload_native_modules(self, native_modules): for uid in instances_to_recreate.keys(): self.delete_node(uid) - # update the node functions of all Nodetypes - self.native_modules = {} - for type, data in native_modules.items(): - self.native_modules[type] = Nodetype(nodenet=self, **native_modules[type]) + self.native_modules = newnative_modules # update the living instances that have the same slot/gate numbers - new_instances = {} - for id, instance in partition.native_module_instances.items(): + new_native_module_instances = {} + for uid, instance in partition.native_module_instances.items(): parameters = instance.clone_parameters() state = instance.clone_state() position = instance.position name = instance.name - partition = self.get_partition(id) - new_native_module_instance = TheanoNode(self, partition, instance.parent_nodespace, id, partition.allocated_nodes[node_from_id(id)]) - new_native_module_instance.position = position - new_native_module_instance.name = name + partition = self.get_partition(uid) + self.close_figures(uid) + if uid in self.flow_module_instances: + flowdata = instance.get_flow_data(complete=True) + new_instance = FlowModule( + self, + partition, + instance.parent_nodespace, + uid, + get_numerical_node_type(instance.type, self.native_modules), + inputmap=flowdata['inputmap'], + outputmap=flowdata['outputmap'], + parameters=parameters + ) + self.flow_module_instances[uid] = new_instance + else: + new_instance = TheanoNode(self, partition, instance.parent_nodespace, uid, partition.allocated_nodes[node_from_id(uid)]) + new_native_module_instances[uid] = new_instance + new_instance.position = position + new_instance.name = name for key, value in parameters.items(): - new_native_module_instance.set_parameter(key, value) + try: + new_instance.set_parameter(key, value) + except NameError: + pass # parameter not defined anymore for 
key, value in state.items(): - new_native_module_instance.set_state(key, value) - new_instances[id] = new_native_module_instance - partition.native_module_instances = new_instances + new_instance.set_state(key, value) - # recreate the deleted ones. Gate configurations and links will not be transferred. - for uid, data in instances_to_recreate.items(): - new_uid = self.create_node( - data['type'], - data['parent_nodespace'], - data['position'], - name=data['name'], - uid=uid, - parameters=data['parameters']) + partition.native_module_instances = new_native_module_instances + + self.update_numeric_native_module_types() + + # recreate the deleted ones. Gate configurations and links will not be transferred. + for uid, data in instances_to_recreate.items(): + new_uid = self.create_node( + data['type'], + data['parent_nodespace'], + data['position'], + name=data['name'], + uid=uid, + parameters=data['parameters']) + + for new_uid in nx.topological_sort(self.flowgraph): + if new_uid in instances_to_recreate: + self.get_node(new_uid).ensure_initialized() - # update native modules numeric types, as these may have been set with a different native module - # node types list + # recompile flow_graphs: + self.update_flow_graphs() + + def update_numeric_native_module_types(self): + """ + update native modules numeric types if the types have been updated + either due to reload_native_modules, or due to changing the worldadapter + """ + for key, partition in self.partitions.items(): native_module_ids = np.where(partition.allocated_nodes > MAX_STD_NODETYPE)[0] for id in native_module_ids: instance = self.get_node(node_to_id(id, partition.pid)) partition.allocated_nodes[id] = get_numerical_node_type(instance.type, self.native_modules) - def get_nodespace_data(self, nodespace_uid, include_links=True): - partition = self.get_partition(nodespace_uid) - data = { - 'nodes': self.construct_nodes_dict(nodespace_uid, 1000, include_links=include_links), - 'nodespaces': 
self.construct_nodespaces_dict(nodespace_uid), - 'monitors': self.construct_monitors_dict(), - 'modulators': self.construct_modulators_dict() - } - if include_links: - followupnodes = [] - for uid in data['nodes']: - followupnodes.extend(self.get_node(uid).get_associated_node_uids()) + def generate_worldadapter_flow_types(self, delete_existing=False): + """ returns native_module_definitions for datasources and targets from the configured worldadapter""" + + auto_nodetypes = [] + if delete_existing: + for key in list(self.native_modules.keys()): + if type(self.native_modules[key]) == FlowNodetype and self.native_modules[key].is_autogenerated: + auto_nodetypes.append(key) + + for uid in list(self.flow_module_instances.keys()): + if self.flow_module_instances[uid].type in auto_nodetypes: + self.delete_node(uid) - for uid in followupnodes: - followup_partition = self.get_partition(uid) - if followup_partition.pid != partition.pid or (partition.allocated_node_parents[node_from_id(uid)] != nodespace_from_id(nodespace_uid)): - data['nodes'][uid] = self.get_node(uid).get_data(complete=False, include_links=include_links) + for key in auto_nodetypes: + del self.native_modules[key] + del self.native_module_definitions[key] + self.worldadapter_flow_nodes = {} + + data = {} + if self.worldadapter_instance and self.worldadapter_instance.generate_flow_modules: + if self.worldadapter_instance.get_available_flow_datasources(): + data['datasources'] = { + 'flow_module': True, + 'implementation': 'python', + 'name': 'datasources', + 'outputs': self.worldadapter_instance.get_available_flow_datasources(), + 'inputs': [], + 'is_autogenerated': True + } + if self.worldadapter_instance.get_available_flow_datatargets(): + dtgroups = self.worldadapter_instance.get_available_flow_datatargets() + dtdims = [self.worldadapter_instance.get_flow_datatarget(name).ndim for name in dtgroups] + data['datatargets'] = { + 'flow_module': True, + 'implementation': 'python', + 'name': 'datatargets', + 
'inputs': dtgroups, + 'outputs': [], + 'inputdims': dtdims, + 'is_autogenerated': True + } return data + def generate_worldadapter_flow_instances(self): + """ Generates flow module instances for the existing autogenerated worldadapter-flowmodule-types """ + for idx, key in enumerate(['datasources', 'datatargets']): + if key in self.native_module_definitions: + uid = self.worldadapter_flow_nodes.get(key) + if uid and uid in self.flow_module_instances: + node = self.flow_module_instances[uid] + for out in node.outputmap: + if out not in self.native_module_definitions[key]['outputs']: + for target_uid, name in node.outputmap[out].copy(): + self.unflow('worldadapter', out, target_uid, name) + for _in in node.inputmap: + if _in not in self.native_module_definitions[key]['inputs']: + for source_uid, name in node.inputmap[_in]: + self.unflow(source_uid, name, 'worldadapter', _in) + numerictype = get_numerical_node_type(key, self.native_modules) + self.flow_module_instances[uid] = FlowModule(self, node._partition, self.rootpartition.rootnodespace_uid, node.uid, numerictype, node.parameters, node.inputmap, node.outputmap) + else: + uid = self.create_node(key, None, [(idx + 2) * 100, 100], name=key) + self.worldadapter_flow_nodes[key] = uid + def get_activation_data(self, nodespace_uids=[], rounded=1): if rounded is not None: mult = math.pow(10, rounded) @@ -1174,9 +1979,13 @@ def get_activation_data(self, nodespace_uids=[], rounded=1): elements = get_elements_per_type(partition.allocated_nodes[id], self.native_modules) offset = partition.allocated_node_offsets[id] if rounded is None: - activations[node_to_id(id, partition.pid)] = [n.item() for n in partition.a.get_value()[offset:offset+elements]] + act = [n.item() for n in partition.a.get_value()[offset:offset+elements]] + if set(act) != {0}: + activations[node_to_id(id, partition.pid)] = act else: - activations[node_to_id(id, partition.pid)] = [n.item() / mult for n in 
np.rint(partition.a.get_value()[offset:offset+elements]*mult)] + act = [n.item() / mult for n in np.rint(partition.a.get_value()[offset:offset+elements]*mult)] + if set(act) != {0}: + activations[node_to_id(id, partition.pid)] = act else: for nsuid in nodespace_uids: nodespace = self.get_nodespace(nsuid) @@ -1187,14 +1996,18 @@ def get_activation_data(self, nodespace_uids=[], rounded=1): elements = get_elements_per_type(partition.allocated_nodes[id], self.native_modules) offset = partition.allocated_node_offsets[id] if rounded is None: - activations[node_to_id(id, partition.pid)] = [n.item() for n in partition.a.get_value()[offset:offset+elements]] + act = [n.item() for n in partition.a.get_value()[offset:offset+elements]] + if set(act) != {0}: + activations[node_to_id(id, partition.pid)] = act else: - activations[node_to_id(id, partition.pid)] = [n.item() / mult for n in np.rint(partition.a.get_value()[offset:offset+elements]*mult)] + act = [n.item() / mult for n in np.rint(partition.a.get_value()[offset:offset+elements]*mult)] + if set(act) != {0}: + activations[node_to_id(id, partition.pid)] = act return activations def get_nodetype(self, type): - if type in self._nodetypes: - return self._nodetypes[type] + if type in self.nodetypes: + return self.nodetypes[type] else: return self.native_modules.get(type) @@ -1206,36 +2019,39 @@ def construct_links_list(self, nodespace_uid=None): nspartition = self.get_partition(nodespace_uid) if nspartition != partition: continue - parent = nodespace_from_id(nodespace_uid) - node_ids = np.where(partition.allocated_node_parents == parent)[0] - else: - node_ids = np.nonzero(partition.allocated_nodes)[0] + w_matrix = partition.w.get_value(borrow=True) - for node_id in node_ids: - - source_type = partition.allocated_nodes[node_id] - for gate_type in range(get_gates_per_type(source_type, self.native_modules)): - gatecolumn = w_matrix[:, partition.allocated_node_offsets[node_id] + gate_type] - links_indices = 
np.nonzero(gatecolumn)[0] - for index in links_indices: - target_id = partition.allocated_elements_to_nodes[index] - target_type = partition.allocated_nodes[target_id] - target_slot_numerical = index - partition.allocated_node_offsets[target_id] - target_slot_type = get_string_slot_type(target_slot_numerical, self.get_nodetype(get_string_node_type(target_type, self.native_modules))) - source_gate_type = get_string_gate_type(gate_type, self.get_nodetype(get_string_node_type(source_type, self.native_modules))) - if partition.sparse: # sparse matrices return matrices of dimension (1,1) as values - weight = float(gatecolumn[index].data) - else: - weight = gatecolumn[index].item() - - data.append({ - "weight": weight, - "certainty": 1, - "target_slot_name": target_slot_type, - "target_node_uid": node_to_id(target_id, partition.pid), - "source_gate_name": source_gate_type, - "source_node_uid": node_to_id(node_id, partition.pid) - }) + link_to_indices, link_from_indices = np.nonzero(w_matrix) + + for i, link_from_index in enumerate(link_from_indices): + link_to_index = link_to_indices[i] + + source_id = partition.allocated_elements_to_nodes[link_from_index] + source_type = partition.allocated_nodes[source_id] + + if nodespace_uid is not None: + nid = nodespace_from_id(nodespace_uid) + if partition.allocated_node_parents[source_id] != nid: + continue + + target_id = partition.allocated_elements_to_nodes[link_to_index] + target_type = partition.allocated_nodes[target_id] + + target_slot_numerical = link_to_index - partition.allocated_node_offsets[target_id] + target_slot_type = get_string_slot_type(target_slot_numerical, self.get_nodetype(get_string_node_type(target_type, self.native_modules))) + + source_gate_numerical = link_from_index - partition.allocated_node_offsets[source_id] + source_gate_type = get_string_gate_type(source_gate_numerical, self.get_nodetype(get_string_node_type(source_type, self.native_modules))) + + weight = w_matrix[link_to_index, 
link_from_index].item() + + data.append({ + "weight": weight, + "target_slot_name": target_slot_type, + "target_node_uid": node_to_id(target_id, partition.pid), + "source_gate_name": source_gate_type, + "source_node_uid": node_to_id(source_id, partition.pid) + }) # find links going out to other partitions for partition_to_spid, to_partition in self.partitions.items(): @@ -1243,12 +2059,34 @@ def construct_links_list(self, nodespace_uid=None): inlinks = to_partition.inlinks[partition.spid] from_elements = inlinks[0].get_value(borrow=True) to_elements = inlinks[1].get_value(borrow=True) - weights = inlinks[2].get_value(borrow=True) - for i, element in enumerate(to_elements): - slotrow = weights[i] - links_indices = np.nonzero(slotrow)[0] - for link_index in links_indices: - source_id = partition.allocated_elements_to_nodes[from_elements[link_index]] + + inlink_type = inlinks[4] + if inlink_type == "dense": + weights = inlinks[2].get_value(borrow=True) + for i, element in enumerate(to_elements): + slotrow = weights[i] + links_indices = np.nonzero(slotrow)[0] + for link_index in links_indices: + source_id = partition.allocated_elements_to_nodes[from_elements[link_index]] + source_type = partition.allocated_nodes[source_id] + source_gate_numerical = from_elements[link_index] - partition.allocated_node_offsets[source_id] + source_gate_type = get_string_gate_type(source_gate_numerical, self.get_nodetype(get_string_node_type(source_type, self.native_modules))) + + target_id = to_partition.allocated_elements_to_nodes[element] + target_type = to_partition.allocated_nodes[target_id] + target_slot_numerical = element - to_partition.allocated_node_offsets[target_id] + target_slot_type = get_string_slot_type(target_slot_numerical, self.get_nodetype(get_string_node_type(target_type, self.native_modules))) + + data.append({ + "weight": float(weights[i, link_index]), + "target_slot_name": target_slot_type, + "target_node_uid": node_to_id(target_id, to_partition.pid), + 
"source_gate_name": source_gate_type, + "source_node_uid": node_to_id(source_id, partition.pid) + }) + elif inlink_type == "identity": + for i, element in enumerate(to_elements): + source_id = partition.allocated_elements_to_nodes[from_elements[i]] source_type = partition.allocated_nodes[source_id] source_gate_numerical = from_elements[link_index] - partition.allocated_node_offsets[source_id] source_gate_type = get_string_gate_type(source_gate_numerical, self.get_nodetype(get_string_node_type(source_type, self.native_modules))) @@ -1259,8 +2097,7 @@ def construct_links_list(self, nodespace_uid=None): target_slot_type = get_string_slot_type(target_slot_numerical, self.get_nodetype(get_string_node_type(target_type, self.native_modules))) data.append({ - "weight": float(weights[i, link_index]), - "certainty": 1, + "weight": 1., "target_slot_name": target_slot_type, "target_node_uid": node_to_id(target_id, to_partition.pid), "source_gate_name": source_gate_type, @@ -1278,11 +2115,22 @@ def construct_native_modules_and_comments_dict(self): i += 1 node_uid = node_to_id(node_id, partition.pid) data[node_uid] = self.get_node(node_uid).get_data(complete=True) + if node_uid in self.flow_module_instances: + data[node_uid].update(self.flow_module_instances[node_uid].get_flow_data()) return data - def construct_nodes_dict(self, nodespace_uid=None, max_nodes=-1, complete=False, include_links=True): - data = {} + def construct_native_modules_numpy_state_dict(self): + numpy_states = {} i = 0 + for partition in self.partitions.values(): + nodeids = np.where((partition.allocated_nodes > MAX_STD_NODETYPE) | (partition.allocated_nodes == COMMENT))[0] + for node_id in nodeids: + node_uid = node_to_id(node_id, partition.pid) + numpy_states[node_uid] = self.get_node(node_uid).get_persistable_state()[1] + return numpy_states + + def construct_nodes_dict(self, nodespace_uid=None, complete=False, include_links=True): + data = {} for partition in self.partitions.values(): if nodespace_uid is 
not None: nodespace_partition = self.get_partition(nodespace_uid) @@ -1294,11 +2142,8 @@ def construct_nodes_dict(self, nodespace_uid=None, max_nodes=-1, complete=False, parent_id = nodespace_from_id(nodespace_uid) nodeids = np.where(partition.allocated_node_parents == parent_id)[0] for node_id in nodeids: - i += 1 node_uid = node_to_id(node_id, partition.pid) data[node_uid] = self.get_node(node_uid).get_data(complete=complete, include_links=include_links) - if max_nodes > 0 and i > max_nodes: - break return data def construct_nodespaces_dict(self, nodespace_uid, transitive=False): @@ -1368,19 +2213,22 @@ def set_sensors_and_actuator_feedback_values(self): for partition in self.partitions.values(): a_array = partition.a.get_value(borrow=True) - a_array[partition.sensor_indices] = sensor_values - a_array[partition.actuator_indices] = actuator_feedback_values + valid = np.where(partition.sensor_indices >= 0)[0] + a_array[partition.sensor_indices[valid]] = sensor_values[valid] + valid = np.where(partition.actuator_indices >= 0)[0] + a_array[partition.actuator_indices[valid]] = actuator_feedback_values[valid] partition.a.set_value(a_array, borrow=True) def set_actuator_values(self): """ Writes the values from the actuators to datatargets and modulators """ - actuator_values_to_write = np.zeros_like(self.rootpartition.actuator_indices) + actuator_values_to_write = np.zeros(self.rootpartition.actuator_indices.shape) for partition in self.partitions.values(): a_array = partition.a.get_value(borrow=True) - actuator_values_to_write = actuator_values_to_write + a_array[partition.actuator_indices] - if self.use_modulators and bool(self.actuatormap): + valid = np.where(partition.actuator_indices >= 0) + actuator_values_to_write[valid] += a_array[partition.actuator_indices[valid]] + if self.use_modulators: writeables = sorted(DoernerianEmotionalModulators.writeable_modulators) # remove modulators from actuator values modulator_values = 
actuator_values_to_write[-len(writeables):] @@ -1389,19 +2237,21 @@ def set_actuator_values(self): if key in self.actuatormap: self.set_modulator(key, modulator_values[idx]) if self._worldadapter_instance: - self._worldadapter_instance.set_datatarget_values(actuator_values_to_write) + self._worldadapter_instance.add_datatarget_values(actuator_values_to_write) - def _rebuild_sensor_actor_indices(self, partition=None): + def _rebuild_sensor_actuator_indices(self, partition=None): """ - Rebuilds the actor and sensor indices of the given partition or all partitions if None + Rebuilds the actuator and sensor indices of the given partition or all partitions if None """ if partition is not None: partitions = [partition] else: partitions = self.partitions.values() for partition in partitions: - partition.sensor_indices = np.zeros(len(self.get_datasources()), np.int32) - partition.actuator_indices = np.zeros(len(self.get_datatargets()), np.int32) + partition.sensor_indices = np.empty(len(self.get_datasources()), np.int32) + partition.sensor_indices.fill(-1) + partition.actuator_indices = np.empty(len(self.get_datatargets()), np.int32) + partition.actuator_indices.fill(-1) for datatarget, node_id in self.actuatormap.items(): if not isinstance(node_id, str): node_id = node_id[0] @@ -1417,7 +2267,7 @@ def _rebuild_sensor_actor_indices(self, partition=None): def get_datasources(self): """ Returns a sorted list of available datasources, including worldadapter datasources and readable modulators""" - datasources = self.worldadapter_instance.get_available_datasources() if self.worldadapter_instance else [] + datasources = list(self.worldadapter_instance.get_available_datasources()) if self.worldadapter_instance else [] if self.use_modulators: for item in sorted(DoernerianEmotionalModulators.readable_modulators): datasources.append(item) @@ -1426,7 +2276,7 @@ def get_datasources(self): def get_datatargets(self): """ Returns a sorted list of available datatargets, including 
worldadapter datatargets and writeable modulators""" - datatargets = self.worldadapter_instance.get_available_datatargets() if self.worldadapter_instance else [] + datatargets = list(self.worldadapter_instance.get_available_datatargets()) if self.worldadapter_instance else [] if self.use_modulators: for item in sorted(DoernerianEmotionalModulators.writeable_modulators): datatargets.append(item) @@ -1462,24 +2312,16 @@ def group_nodes_by_ids(self, nodespace_uid, node_uids, group_name, gatetype="gen partition.group_nodes_by_ids(nodespace_uid, ids, group_name, gatetype) + def group_highdimensional_elements(self, node_uid, gate=None, slot=None, group_name=None): + partition = self.get_partition(node_uid) + partition.group_highdimensional_elements(node_uid, gate=gate, slot=slot, group_name=group_name) + def ungroup_nodes(self, nodespace_uid, group): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid partition = self.get_partition(nodespace_uid) partition.ungroup_nodes(nodespace_uid, group) - def dump_group(self, nodespace_uid, group): - if nodespace_uid is None: - nodespace_uid = self.get_nodespace(None).uid - partition = self.get_partition(nodespace_uid) - - ids = partition.nodegroups[nodespace_uid][group] - for element in ids: - nid = partition.allocated_elements_to_nodes[element] - uid = node_to_id(nid, partition.pid) - node = self.get_node(uid) - print("%s %s" % (node.uid, node.name)) - def get_activations(self, nodespace_uid, group): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid @@ -1492,17 +2334,17 @@ def set_activations(self, nodespace_uid, group, new_activations): partition = self.get_partition(nodespace_uid) partition.set_activations(nodespace_uid, group, new_activations) - def get_thetas(self, nodespace_uid, group): + def get_gate_configurations(self, nodespace_uid, group, gatefunction_parameter=None): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid partition = 
self.get_partition(nodespace_uid) - return partition.get_thetas(nodespace_uid, group) + return partition.get_gate_configurations(nodespace_uid, group, gatefunction_parameter) - def set_thetas(self, nodespace_uid, group, new_thetas): + def set_gate_configurations(self, nodespace_uid, group, gatefunction, gatefunction_parameter=None, parameter_values=None): if nodespace_uid is None: nodespace_uid = self.get_nodespace(None).uid partition = self.get_partition(nodespace_uid) - partition.set_thetas(nodespace_uid, group, new_thetas) + partition.set_gate_configurations(nodespace_uid, group, gatefunction, gatefunction_parameter, parameter_values) def get_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, group_to): if nodespace_from_uid is None: @@ -1518,10 +2360,26 @@ def get_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro if nodespace_to_uid not in partition_to.nodegroups or group_to not in partition_to.nodegroups[nodespace_to_uid]: raise ValueError("Group %s does not exist in nodespace %s." 
% (group_to, nodespace_to_uid)) + from_els = partition_from.nodegroups[nodespace_from_uid][group_from] + to_els = partition_to.nodegroups[nodespace_to_uid][group_to] + zero_weights = np.zeros((len(to_els), len(from_els))) + + if partition_from.spid not in partition_to.inlinks: + return zero_weights + inlinks = partition_to.inlinks[partition_from.spid] - indices_from = np.searchsorted(inlinks[0].get_value(borrow=True), partition_from.nodegroups[nodespace_from_uid][group_from]) - indices_to = np.searchsorted(inlinks[1].get_value(borrow=True), partition_to.nodegroups[nodespace_to_uid][group_to]) - cols, rows = np.meshgrid(indices_from, indices_to) + from_indices = inlinks[0].get_value(borrow=True) + to_indices = inlinks[1].get_value(borrow=True) + + if len(np.union1d(from_indices, from_els)) > len(from_indices) or len(np.union1d(to_indices, to_els)) > len(to_indices): + self.set_link_weights(nodespace_from_uid, group_from, nodespace_to_uid, group_to, zero_weights) + inlinks = partition_to.inlinks[partition_from.spid] + from_indices = inlinks[0].get_value(borrow=True) + to_indices = inlinks[1].get_value(borrow=True) + + search_from = np.searchsorted(from_indices, from_els) + search_to = np.searchsorted(to_indices, to_els) + cols, rows = np.meshgrid(search_from, search_to) return inlinks[2].get_value(borrow=True)[rows, cols] else: return partition_from.get_link_weights(nodespace_from_uid, group_from, nodespace_to_uid, group_to) @@ -1541,8 +2399,8 @@ def set_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro if nodespace_to_uid not in partition_to.nodegroups or group_to not in partition_to.nodegroups[nodespace_to_uid]: raise ValueError("Group %s does not exist in nodespace %s." 
% (group_to, nodespace_to_uid)) - elements_from_indices = partition_from.nodegroups[nodespace_from_uid][group_from] - elements_to_indices = partition_to.nodegroups[nodespace_to_uid][group_to] + elements_from_indices = np.array(partition_from.nodegroups[nodespace_from_uid][group_from], dtype='int32') + elements_to_indices = np.array(partition_to.nodegroups[nodespace_to_uid][group_to], dtype='int32') partition_to.set_inlink_weights(partition_from.spid, elements_from_indices, elements_to_indices, new_w) else: @@ -1550,15 +2408,21 @@ def set_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro self.proxycache.clear() - # uids_to_invalidate = self.get_node_uids(nodespace_from_uid, group_from) - # uids_to_invalidate.extend(self.get_node_uids(nodespace_to_uid, group_to)) - - # for uid in uids_to_invalidate: - # if uid in self.proxycache: - # del self.proxycache[uid] - def get_available_gatefunctions(self): - return ["identity", "absolute", "sigmoid", "tanh", "rect", "one_over_x"] + return { + "identity": {}, + "absolute": {}, + "sigmoid": {'bias': 0}, + "elu": {'bias': 0}, + "relu": {'bias': 0}, + "one_over_x": {}, + "threshold": { + "minimum": 0, + "maximum": 1, + "amplification": 1, + "threshold": 0 + } + } def add_slot_monitor(self, node_uid, slot, **_): raise RuntimeError("Theano engine does not support slot monitors") @@ -1574,7 +2438,7 @@ def has_nodespace_changes(self, nodespace_uids=[], since_step=0): return True return False - def get_nodespace_changes(self, nodespace_uids=[], since_step=0): + def get_nodespace_changes(self, nodespace_uids=[], since_step=0, include_links=True): result = { 'nodes_dirty': {}, 'nodespaces_dirty': {}, @@ -1585,6 +2449,14 @@ def get_nodespace_changes(self, nodespace_uids=[], since_step=0): if nodespace_uids == []: nodespace_uids = self.get_nodespace_uids() + nodespaces_by_partition = {} + for nodespace_uid in nodespace_uids: + spid = self.get_partition(nodespace_uid).spid + if spid not in nodespaces_by_partition: + 
nodespaces_by_partition[spid] = [] + nodespace_uid = self.get_nodespace(nodespace_uid).uid # b/c of None == Root + nodespaces_by_partition[spid].append(nodespace_from_id(nodespace_uid)) + for nsuid in nodespace_uids: nodespace = self.get_nodespace(nsuid) partition = self.get_partition(nodespace.uid) @@ -1593,14 +2465,31 @@ def get_nodespace_changes(self, nodespace_uids=[], since_step=0): result['nodespaces_deleted'].extend(self.deleted_items[i].get('nodespaces_deleted', [])) result['nodes_deleted'].extend(self.deleted_items[i].get('nodes_deleted', [])) changed_nodes, changed_nodespaces = partition.get_nodespace_changes(nodespace.uid, since_step) - for uid in changed_nodes: - uid = node_to_id(uid, partition.pid) - result['nodes_dirty'][uid] = self.get_node(uid).get_data(include_links=True) + nodes, _, _ = partition.get_node_data(ids=changed_nodes, nodespaces_by_partition=nodespaces_by_partition, include_links=include_links) + result['nodes_dirty'].update(nodes) for uid in changed_nodespaces: uid = nodespace_to_id(uid, partition.pid) result['nodespaces_dirty'][uid] = self.get_nodespace(uid).get_data() return result + def add_gate_activation_recorder(self, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + rec = recorder.GateActivationRecorder(self, group_definition, name, interval=interval) + self._recorders[rec.uid] = rec + return rec + + def add_node_activation_recorder(self, group_definition, name, interval=1): + """ Adds an activation recorder to a group of nodes.""" + rec = recorder.NodeActivationRecorder(self, group_definition, name, interval=interval) + self._recorders[rec.uid] = rec + return rec + + def add_linkweight_recorder(self, from_group_definition, to_group_definition, name, interval=1): + """ Adds a linkweight recorder to links between to groups.""" + rec = recorder.LinkweightRecorder(self, from_group_definition, to_group_definition, name, interval=interval) + self._recorders[rec.uid] = rec + return rec + 
def get_dashboard(self): data = super(TheanoNodenet, self).get_dashboard() data['count_nodes'] = 0 diff --git a/micropsi_core/nodenet/theano_engine/theano_nodespace.py b/micropsi_core/nodenet/theano_engine/theano_nodespace.py index 5330bd70..cb671b42 100644 --- a/micropsi_core/nodenet/theano_engine/theano_nodespace.py +++ b/micropsi_core/nodenet/theano_engine/theano_nodespace.py @@ -22,19 +22,6 @@ def index(self): def index(self, index): raise NotImplementedError("index can not be set in theano_engine") - @property - def position(self): - return self._nodenet.positions.get(self.uid, [10, 10, 0]) - - @position.setter - def position(self, position): - if position is None and self.uid in self._nodenet.positions: - del self._nodenet.positions[self.uid] - else: - position = list(position) - position = (position + [0] * 3)[:3] - self._nodenet.positions[self.uid] = position - @property def name(self): return self._nodenet.names.get(self.uid, self.uid) diff --git a/micropsi_core/nodenet/theano_engine/theano_partition.py b/micropsi_core/nodenet/theano_engine/theano_partition.py index a7adbac0..a39e0500 100644 --- a/micropsi_core/nodenet/theano_engine/theano_partition.py +++ b/micropsi_core/nodenet/theano_engine/theano_partition.py @@ -1,5 +1,6 @@ +import io import os import theano @@ -9,6 +10,7 @@ import theano.sparse as ST from theano.tensor import nnet as N +from micropsi_core.nodenet.node import FlowNodetype, HighdimensionalNodetype from micropsi_core.nodenet.theano_engine.theano_definitions import * @@ -91,24 +93,14 @@ def has_gatefunction_sigmoid(self, value): self.__has_gatefunction_sigmoid = value @property - def has_gatefunction_tanh(self): - return self.__has_gatefunction_tanh + def has_gatefunction_relu(self): + return self.__has_gatefunction_relu - @has_gatefunction_tanh.setter - def has_gatefunction_tanh(self, value): - if value != self.__has_gatefunction_tanh: + @has_gatefunction_relu.setter + def has_gatefunction_relu(self, value): + if value != 
self.__has_gatefunction_relu: self.__has_new_usages = True - self.__has_gatefunction_tanh = value - - @property - def has_gatefunction_rect(self): - return self.__has_gatefunction_rect - - @has_gatefunction_rect.setter - def has_gatefunction_rect(self, value): - if value != self.__has_gatefunction_rect: - self.__has_new_usages = True - self.__has_gatefunction_rect = value + self.__has_gatefunction_relu = value @property def has_gatefunction_one_over_x(self): @@ -120,6 +112,26 @@ def has_gatefunction_one_over_x(self, value): self.__has_new_usages = True self.__has_gatefunction_one_over_x = value + @property + def has_gatefunction_elu(self): + return self.__has_gatefunction_elu + + @has_gatefunction_elu.setter + def has_gatefunction_elu(self, value): + if value != self.__has_gatefunction_elu: + self.__has_new_usages = True + self.__has_gatefunction_elu = value + + @property + def has_gatefunction_threshold(self): + return self.__has_gatefunction_threshold + + @has_gatefunction_threshold.setter + def has_gatefunction_threshold(self, value): + if value != self.__has_gatefunction_threshold: + self.__has_new_usages = True + self.__has_gatefunction_threshold = value + def __init__(self, nodenet, pid, sparse=True, initial_number_of_nodes=2000, average_elements_per_node_assumption=5, initial_number_of_nodespaces=10): # logger used by this partition @@ -199,14 +211,15 @@ def __init__(self, nodenet, pid, sparse=True, initial_number_of_nodes=2000, aver self.a_prev = None # vector of output activations at t-1 (not all gate types maintain this) self.g_factor = None # vector of gate factors, controlled by activators, semantics differ by node type - self.g_threshold = None # vector of thresholds (gate parameters) + + # gatefunction parameters + self.g_bias = None # vector of biases + self.g_threshold = None # vector of thresholds self.g_amplification = None # vector of amplification factors self.g_min = None # vector of lower bounds self.g_max = None # vector of upper bounds - 
self.g_function_selector = None # vector of gate function selectors - - self.g_theta = None # vector of thetas (i.e. biases, use depending on gate function) + self.g_function_selector = None # vector of gate function selectors self.g_expect = None # vector of expectations self.g_countdown = None # vector of number of steps until expectation needs to be met @@ -237,8 +250,8 @@ def __init__(self, nodenet, pid, sparse=True, initial_number_of_nodes=2000, aver self.allocated_elements_to_activators = np.zeros(self.NoE, dtype=np.int32) - self.sensor_indices = np.zeros(0, dtype=np.int32) # index := datasource, value:=node_id - self.actuator_indices = np.zeros(0, dtype=np.int32) # index := datatarget, value:=node_id + self.sensor_indices = np.zeros(0, dtype=np.int32) # index := datasource, value:=element index + self.actuator_indices = np.zeros(0, dtype=np.int32) # index := datatarget, value:=element index self.inlinks = {} @@ -265,11 +278,11 @@ def __init__(self, nodenet, pid, sparse=True, initial_number_of_nodes=2000, aver a_prev_array = np.zeros(self.NoE, dtype=nodenet.numpyfloatX) self.a_prev = theano.shared(value=a_prev_array.astype(T.config.floatX), name="a_prev", borrow=True) - g_theta_array = np.zeros(self.NoE, dtype=nodenet.numpyfloatX) - self.g_theta = theano.shared(value=g_theta_array.astype(T.config.floatX), name="theta", borrow=True) + g_bias_array = np.zeros(self.NoE, dtype=nodenet.numpyfloatX) + self.g_bias = theano.shared(value=g_bias_array.astype(T.config.floatX), name="bias", borrow=True) - g_theta_shifted_matrix = np.lib.stride_tricks.as_strided(g_theta_array, shape=(self.NoE, 7), strides=(nodenet.byte_per_float, nodenet.byte_per_float)) - self.g_theta_shifted = theano.shared(value=g_theta_shifted_matrix.astype(T.config.floatX), name="g_theta_shifted_shifted", borrow=True) + g_bias_shifted_matrix = np.lib.stride_tricks.as_strided(g_bias_array, shape=(self.NoE, 7), strides=(nodenet.byte_per_float, nodenet.byte_per_float)) + self.g_bias_shifted = 
theano.shared(value=g_bias_shifted_matrix.astype(T.config.floatX), name="g_bias_shifted_shifted", borrow=True) g_factor_array = np.ones(self.NoE, dtype=nodenet.numpyfloatX) self.g_factor = theano.shared(value=g_factor_array.astype(T.config.floatX), name="g_factor", borrow=True) @@ -315,8 +328,10 @@ def __init__(self, nodenet, pid, sparse=True, initial_number_of_nodes=2000, aver self.__has_gatefunction_absolute = False self.__has_gatefunction_sigmoid = False self.__has_gatefunction_tanh = False - self.__has_gatefunction_rect = False self.__has_gatefunction_one_over_x = False + self.__has_gatefunction_elu = False + self.__has_gatefunction_relu = False + self.__has_gatefunction_threshold = False self.por_ret_dirty = True self.last_allocated_node = 0 @@ -335,7 +350,7 @@ def compile_propagate(self): def compile_calculate_nodes(self): slots = self.a_shifted - biases = self.g_theta_shifted + biases = self.g_bias_shifted countdown = self.g_countdown por_linked = self.n_node_porlinked ret_linked = self.n_node_retlinked @@ -363,11 +378,10 @@ def compile_calculate_nodes(self): # ### gen plumbing - pipe_gen_sur_exp = slots[:, 11] + slots[:, 13] # sum of sur and exp as default + pipe_gen_sur_exp = (slots[:, 11] + slots[:, 13]) * slots[:, 10] # sum of sur and exp as default # drop to 0 if < expectation pipe_gen_sur_exp = T.switch(T.lt(pipe_gen_sur_exp, self.g_expect) * T.gt(pipe_gen_sur_exp, 0), 0, pipe_gen_sur_exp) - pipe_gen = slots[:, 7] * slots[:, 10] # gen * sub pipe_gen = T.switch(abs(pipe_gen) > 0.1, pipe_gen, pipe_gen_sur_exp) # drop to def. if below 0.1 # drop to def. 
if por == 0 and por slot is linked @@ -412,11 +426,12 @@ def compile_calculate_nodes(self): countdown_sur = T.switch(cd_reset_cond, self.g_wait, T.maximum(countdown - 1, -1)) pipe_sur_cond = T.eq(por_linked, 0) + T.gt(slots[:, 4], 0) # not por-linked or por > 0 + pipe_sur_cond *= slots[:, 6] # and sub > 0 pipe_sur_cond = T.gt(pipe_sur_cond, 0) pipe_sur = slots[:, 7] # start with sur pipe_sur = pipe_sur + T.gt(slots[:, 3], 0.2) # add gen-loop 1 - pipe_sur = pipe_sur + (slots[:, 9] * slots[:, 6]) # add exp * sub + pipe_sur = pipe_sur + slots[:, 9] # add exp # drop to zero if < expectation pipe_sur = T.switch(T.lt(pipe_sur, self.g_expect) * T.gt(pipe_sur, 0), 0, pipe_sur) # check if we're in timeout @@ -431,12 +446,11 @@ def compile_calculate_nodes(self): pipe_cat_cond = T.switch(T.eq(por_linked, 1), T.gt(slots[:, 3], 0), 1) # (if linked, por must be > 0) pipe_cat_cond = pipe_cat_cond * T.eq(slots[:, 2], 0) # and (gen == 0) - pipe_cat = T.clip(slots[:, 6], 0, 1) # bubble: start with sur if sur > 0 - pipe_cat = pipe_cat + slots[:, 5] # add sub + pipe_cat = slots[:, 5] # start with sub pipe_cat = pipe_cat + slots[:, 7] # add cat pipe_cat = pipe_cat * pipe_cat_cond # apply conditions # add cat (for search) if sub=sur=0 - pipe_cat = pipe_cat + (slots[:, 7] * T.eq(slots[:, 5], 0) * T.eq(slots[:, 6], 0)) + pipe_cat = pipe_cat + (slots[:, 7] * T.eq(slots[:, 5], 0) * T.eq(slots[:, 6], 0) * T.eq(pipe_cat, 0)) ### exp plumbing pipe_exp = slots[:, 5] # start with sur @@ -537,28 +551,37 @@ def compile_calculate_nodes(self): gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_ABSOLUTE), abs(gate_function_output), gate_function_output) # apply GATE_FUNCTION_SIGMOID to masked gates if self.has_gatefunction_sigmoid: - gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_SIGMOID), N.sigmoid(gate_function_output + self.g_theta), gate_function_output) - # apply GATE_FUNCTION_TANH to masked gates - if self.has_gatefunction_tanh: - 
gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_TANH), T.tanh(gate_function_output + self.g_theta), gate_function_output) - # apply GATE_FUNCTION_RECT to masked gates - if self.has_gatefunction_rect: - gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_RECT), T.switch(gate_function_output + self.g_theta > 0, gate_function_output - self.g_theta, 0), gate_function_output) + x = gate_function_output + self.g_bias + gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_SIGMOID), N.sigmoid(x), gate_function_output) + # apply GATE_FUNCTION_ELU to masked gates + if self.has_gatefunction_elu: + x = gate_function_output + self.g_bias + gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_ELU), T.switch(gate_function_output > 0., x, T.exp(x) - 1.), gate_function_output) + # apply GATE_FUNCTION_RELU to masked gates + if self.has_gatefunction_relu: + x = gate_function_output + self.g_bias + gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_RELU), T.maximum(x, 0.), gate_function_output) + # wait for theano 0.7.1 for this to work + #gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_RELU), T.nnet.relu(x), gate_function_output) # apply GATE_FUNCTION_DIST to masked gates if self.has_gatefunction_one_over_x: gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_DIST), T.switch(T.neq(0, gate_function_output), 1 / gate_function_output, 0), gate_function_output) - # apply threshold - thresholded_gate_function_output = \ - T.switch(T.ge(gate_function_output, self.g_threshold), gate_function_output, 0) + if self.has_gatefunction_threshold: + + # apply threshold + thresholded_gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_THRESHOLD), \ + T.switch(T.ge(gate_function_output, self.g_threshold), gate_function_output, 0), gate_function_output) - # apply amplification - 
amplified_gate_function_output = thresholded_gate_function_output * self.g_amplification + # apply amplification + amplified_gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_THRESHOLD), thresholded_gate_function_output * self.g_amplification, thresholded_gate_function_output) - # apply minimum and maximum - limited_gate_function_output = T.clip(amplified_gate_function_output, self.g_min, self.g_max) + # apply minimum and maximum + limited_gate_function_output = T.switch(T.eq(self.g_function_selector, GATE_FUNCTION_THRESHOLD), T.clip(amplified_gate_function_output, self.g_min, self.g_max), amplified_gate_function_output) - gatefunctions = limited_gate_function_output + gatefunctions = limited_gate_function_output + else: + gatefunctions = gate_function_output # put the theano graph into a callable function to be executed if self.has_pipes: @@ -571,6 +594,10 @@ def get_compiled_propagate_inlinks(self, from_partition, from_elements, to_eleme a_in = T.inc_subtensor(self.a_in[to_elements], propagated_a, inplace=True, tolerate_inplace_aliasing=True) return theano.function([], None, updates=[(self.a_in, a_in)], accept_inplace=True) + def get_compiled_propagate_identity_inlinks(self, from_partition, from_elements, to_elements): + a_in = T.inc_subtensor(self.a_in[to_elements], from_partition.a[from_elements], inplace=True, tolerate_inplace_aliasing=True) + return theano.function([], None, updates=[(self.a_in, a_in)], accept_inplace=True) + def calculate(self): self.t.set_value(np.int32(self.nodenet.current_step)) @@ -590,6 +617,7 @@ def calculate(self): self.__rebuild_shifted() if self.has_directional_activators or self.__has_sampling_activators: self.__calculate_g_factors() + self.__clean_native_module_gates() self.calculate_nodes() self.__calculate_native_modules() @@ -597,6 +625,11 @@ def __take_native_module_slot_snapshots(self): for uid, instance in self.native_module_instances.items(): instance.take_slot_activation_snapshot() + def 
__clean_native_module_gates(self): + for uid, instance in self.native_module_instances.items(): + for gate_type in instance.get_gate_types(): + instance.get_gate(gate_type).activation = 0 + def __calculate_native_modules(self): for uid, instance in self.native_module_instances.items(): instance.node_function() @@ -613,10 +646,10 @@ def __rebuild_shifted(self): a_shifted_matrix = np.lib.stride_tricks.as_strided(a_rolled_array, shape=(self.NoE, 14), strides=(self.nodenet.byte_per_float, self.nodenet.byte_per_float)) self.a_shifted.set_value(a_shifted_matrix, borrow=True) - g_theta_array = self.g_theta.get_value(borrow=True) - g_theta_rolled_array = np.roll(g_theta_array, 7) - g_theta_shifted_matrix = np.lib.stride_tricks.as_strided(g_theta_rolled_array, shape=(self.NoE, 14), strides=(self.nodenet.byte_per_float, self.nodenet.byte_per_float)) - self.g_theta_shifted.set_value(g_theta_shifted_matrix, borrow=True) + g_bias_array = self.g_bias.get_value(borrow=True) + g_bias_rolled_array = np.roll(g_bias_array, 7) + g_bias_shifted_matrix = np.lib.stride_tricks.as_strided(g_bias_rolled_array, shape=(self.NoE, 14), strides=(self.nodenet.byte_per_float, self.nodenet.byte_per_float)) + self.g_bias_shifted.set_value(g_bias_shifted_matrix, borrow=True) def rebuild_por_linked(self): @@ -692,7 +725,9 @@ def grow_number_of_nodes(self, growby): self.NoN = new_NoN self.has_new_usages = True - def save(self, datafilename): + def save(self, base_path=None, zipfile=None): + if base_path is None: + base_path = self.nodenet.persistency_path allocated_nodes = self.allocated_nodes allocated_node_offsets = self.allocated_node_offsets @@ -717,7 +752,7 @@ def save(self, datafilename): w = sp.csr_matrix(w) a = self.a.get_value(borrow=True) - g_theta = self.g_theta.get_value(borrow=True) + g_bias = self.g_bias.get_value(borrow=True) g_factor = self.g_factor.get_value(borrow=True) g_threshold = self.g_threshold.get_value(borrow=True) g_amplification = self.g_amplification.get_value(borrow=True) 
@@ -731,90 +766,78 @@ def save(self, datafilename): sizeinformation = [self.NoN, self.NoE, self.NoNS] - inlink_from_element_count = 0 - inlink_to_element_count = 0 - weight_count = 0 for spid, inlinks in self.inlinks.items(): - inlink_from_element_count += len(inlinks[0].get_value(borrow=True)) - inlink_to_element_count += len(inlinks[1].get_value(borrow=True)) - weight_count += len(inlinks[0].get_value(borrow=True)) * len(inlinks[1].get_value(borrow=True)) - inlinks_pids = np.zeros(len(self.inlinks), dtype=np.int16) - inlink_from_lengths = np.zeros(len(self.inlinks), dtype=np.int32) - inlink_to_lengths = np.zeros(len(self.inlinks), dtype=np.int32) - inlink_from_elements = np.zeros(inlink_from_element_count, dtype=np.int32) - inlink_to_elements = np.zeros(inlink_to_element_count, dtype=np.int32) - inlink_weights = np.zeros(weight_count, dtype=self.nodenet.numpyfloatX) - - from_offset = 0 - to_offset = 0 - weight_offset = 0 - for i, spid in enumerate(self.inlinks.keys()): - inlinks_pids[i] = int(spid) - from_elements = self.inlinks[spid][0].get_value(borrow=True) - to_elements = self.inlinks[spid][1].get_value(borrow=True) - weights = self.inlinks[spid][2].get_value(borrow=True) - from_length = len(from_elements) - to_length = len(to_elements) - inlink_from_lengths[i] = from_length - inlink_to_lengths[i] = to_length - inlink_from_elements[from_offset:from_offset+from_length] = from_elements - inlink_to_elements[to_offset:to_offset+to_length] = to_elements - inlink_weights[weight_offset:weight_offset+(from_length*to_length)] = np.ravel(weights) - weight_offset += from_length * to_length - from_offset += from_length - to_offset += to_length - - np.savez(datafilename, - allocated_nodes=allocated_nodes, - allocated_node_offsets=allocated_node_offsets, - allocated_elements_to_nodes=allocated_elements_to_nodes, - allocated_node_parents=allocated_node_parents, - allocated_nodespaces=allocated_nodespaces, - w_data=w.data, - w_indices=w.indices, - w_indptr=w.indptr, - a=a, - 
g_theta=g_theta, - g_factor=g_factor, - g_threshold=g_threshold, - g_amplification=g_amplification, - g_min=g_min, - g_max=g_max, - g_function_selector=g_function_selector, - g_expect=g_expect, - g_countdown=g_countdown, - g_wait=g_wait, - n_function_selector=n_function_selector, - sizeinformation=sizeinformation, - allocated_elements_to_activators=allocated_elements_to_activators, - allocated_nodespaces_por_activators=allocated_nodespaces_por_activators, - allocated_nodespaces_ret_activators=allocated_nodespaces_ret_activators, - allocated_nodespaces_sub_activators=allocated_nodespaces_sub_activators, - allocated_nodespaces_sur_activators=allocated_nodespaces_sur_activators, - allocated_nodespaces_cat_activators=allocated_nodespaces_cat_activators, - allocated_nodespaces_exp_activators=allocated_nodespaces_exp_activators, - allocated_nodespaces_sampling_activators=allocated_nodespaces_sampling_activators, - inlink_pids=inlinks_pids, - inlink_from_lengths=inlink_from_lengths, - inlink_to_lengths=inlink_to_lengths, - inlink_from_elements=inlink_from_elements, - inlink_to_elements=inlink_to_elements, - inlink_weights=inlink_weights) - - def load_data(self, datafilename, nodes_data): + filename = "inlinks-%s-from-%s.npz" % (self.spid, spid) + data = { + 'from_partition_id': spid, + 'from_ids': inlinks[0].get_value(borrow=True), + 'to_ids': inlinks[1].get_value(borrow=True), + 'weights': inlinks[2].get_value(borrow=True) if inlinks[2] else None, + 'inlink_type': inlinks[4] + } + if zipfile: + stream = io.BytesIO() + np.savez(stream, **data) + stream.seek(0) + zipfile.writestr(filename, stream.getvalue()) + else: + np.savez(os.path.join(base_path, filename), **data) + filename = "partition-%s.npz" % self.spid + data = { + 'allocated_nodes': allocated_nodes, + 'allocated_node_offsets': allocated_node_offsets, + 'allocated_elements_to_nodes': allocated_elements_to_nodes, + 'allocated_node_parents': allocated_node_parents, + 'allocated_nodespaces': allocated_nodespaces, + 
'w_data': w.data, + 'w_indices': w.indices, + 'w_indptr': w.indptr, + 'a': a, + 'g_bias': g_bias, + 'g_factor': g_factor, + 'g_threshold': g_threshold, + 'g_amplification': g_amplification, + 'g_min': g_min, + 'g_max': g_max, + 'g_function_selector': g_function_selector, + 'g_expect': g_expect, + 'g_countdown': g_countdown, + 'g_wait': g_wait, + 'n_function_selector': n_function_selector, + 'sizeinformation': sizeinformation, + 'allocated_elements_to_activators': allocated_elements_to_activators, + 'allocated_nodespaces_por_activators': allocated_nodespaces_por_activators, + 'allocated_nodespaces_ret_activators': allocated_nodespaces_ret_activators, + 'allocated_nodespaces_sub_activators': allocated_nodespaces_sub_activators, + 'allocated_nodespaces_sur_activators': allocated_nodespaces_sur_activators, + 'allocated_nodespaces_cat_activators': allocated_nodespaces_cat_activators, + 'allocated_nodespaces_exp_activators': allocated_nodespaces_exp_activators, + 'allocated_nodespaces_sampling_activators': allocated_nodespaces_sampling_activators + } + if zipfile: + stream = io.BytesIO() + np.savez(stream, **data) + stream.seek(0) + zipfile.writestr(filename, stream.getvalue()) + else: + np.savez(os.path.join(base_path, filename), **data) + + def load_data(self, nodes_data, invalid_uids=()): """Load the node net from a file""" # try to access file + base_path = self.nodenet.persistency_path + filename = os.path.join(base_path, "partition-%s.npz" % self.spid) datafile = None - if os.path.isfile(datafilename): + if os.path.isfile(filename): try: - self.logger.info("Loading nodenet %s partition %i bulk data from file %s" % (self.nodenet.name, self.pid, datafilename)) - datafile = np.load(datafilename) + self.logger.info("Loading nodenet %s partition %i bulk data from file %s" % (self.nodenet.name, self.pid, filename)) + datafile = np.load(filename) except ValueError: # pragma: no cover - self.logger.warn("Could not read nodenet data from file %s" % datafile) + 
self.logger.warning("Could not read partition data from file %s" % filename) return False except IOError: # pragma: no cover - self.logger.warn("Could not open nodenet file %s" % datafile) + self.logger.warning("Could not open partition file %s" % filename) return False if not datafile: @@ -834,73 +857,73 @@ def load_data(self, datafilename, nodes_data): self.a_prev = theano.shared(value=a_prev_array.astype(T.config.floatX), name="a_prev", borrow=True) else: - self.logger.warn("no sizeinformation in file, falling back to defaults") # pragma: no cover + self.logger.warning("no sizeinformation in file, falling back to defaults") # pragma: no cover # the load bulk data into numpy arrays if 'allocated_nodes' in datafile: self.allocated_nodes = datafile['allocated_nodes'] else: - self.logger.warn("no allocated_nodes in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodes in file, falling back to defaults") # pragma: no cover if 'allocated_node_offsets' in datafile: self.allocated_node_offsets = datafile['allocated_node_offsets'] else: - self.logger.warn("no allocated_node_offsets in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_node_offsets in file, falling back to defaults") # pragma: no cover if 'allocated_elements_to_nodes' in datafile: self.allocated_elements_to_nodes = datafile['allocated_elements_to_nodes'] else: - self.logger.warn("no allocated_elements_to_nodes in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_elements_to_nodes in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces' in datafile: self.allocated_nodespaces = datafile['allocated_nodespaces'] else: - self.logger.warn("no allocated_nodespaces in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces in file, falling back to defaults") # pragma: no cover if 'allocated_node_parents' in datafile: 
self.allocated_node_parents = datafile['allocated_node_parents'] else: - self.logger.warn("no allocated_node_parents in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_node_parents in file, falling back to defaults") # pragma: no cover if 'allocated_elements_to_activators' in datafile: self.allocated_elements_to_activators = datafile['allocated_elements_to_activators'] else: - self.logger.warn("no allocated_elements_to_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_elements_to_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_por_activators' in datafile: self.allocated_nodespaces_por_activators = datafile['allocated_nodespaces_por_activators'] else: - self.logger.warn("no allocated_nodespaces_por_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_por_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_ret_activators' in datafile: self.allocated_nodespaces_ret_activators = datafile['allocated_nodespaces_ret_activators'] else: - self.logger.warn("no allocated_nodespaces_ret_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_ret_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_sub_activators' in datafile: self.allocated_nodespaces_sub_activators = datafile['allocated_nodespaces_sub_activators'] else: - self.logger.warn("no allocated_nodespaces_sub_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_sub_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_sur_activators' in datafile: self.allocated_nodespaces_sur_activators = datafile['allocated_nodespaces_sur_activators'] else: - self.logger.warn("no allocated_nodespaces_sur_activators 
in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_sur_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_cat_activators' in datafile: self.allocated_nodespaces_cat_activators = datafile['allocated_nodespaces_cat_activators'] else: - self.logger.warn("no allocated_nodespaces_cat_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_cat_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_exp_activators' in datafile: self.allocated_nodespaces_exp_activators = datafile['allocated_nodespaces_exp_activators'] else: - self.logger.warn("no allocated_nodespaces_exp_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_exp_activators in file, falling back to defaults") # pragma: no cover if 'allocated_nodespaces_sampling_activators' in datafile: self.allocated_nodespaces_sampling_activators = datafile['allocated_nodespaces_sampling_activators'] else: - self.logger.warn("no allocated_nodespaces_por_activators in file, falling back to defaults") # pragma: no cover + self.logger.warning("no allocated_nodespaces_por_activators in file, falling back to defaults") # pragma: no cover if 'w_data' in datafile and 'w_indices' in datafile and 'w_indptr' in datafile: w = sp.csr_matrix((datafile['w_data'], datafile['w_indices'], datafile['w_indptr']), shape = (self.NoE, self.NoE)) @@ -911,62 +934,62 @@ def load_data(self, datafilename, nodes_data): self.a = theano.shared(value=datafile['a'].astype(T.config.floatX), name="a", borrow=False) self.a_in = theano.shared(value=np.zeros_like(datafile['a']).astype(T.config.floatX), name="a_in", borrow=False) else: - self.logger.warn("no w_data, w_indices or w_indptr in file, falling back to defaults") # pragma: no cover + self.logger.warning("no w_data, w_indices or w_indptr in file, falling back to 
defaults") # pragma: no cover - if 'g_theta' in datafile: - self.g_theta = theano.shared(value=datafile['g_theta'].astype(T.config.floatX), name="theta", borrow=False) + if 'g_bias' in datafile: + self.g_bias = theano.shared(value=datafile['g_bias'].astype(T.config.floatX), name="bias", borrow=False) else: - self.logger.warn("no g_theta in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_bias in file, falling back to defaults") # pragma: no cover if 'g_factor' in datafile: self.g_factor = theano.shared(value=datafile['g_factor'].astype(T.config.floatX), name="g_factor", borrow=False) else: - self.logger.warn("no g_factor in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_factor in file, falling back to defaults") # pragma: no cover if 'g_threshold' in datafile: self.g_threshold = theano.shared(value=datafile['g_threshold'].astype(T.config.floatX), name="g_threshold", borrow=False) else: - self.logger.warn("no g_threshold in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_threshold in file, falling back to defaults") # pragma: no cover if 'g_amplification' in datafile: self.g_amplification = theano.shared(value=datafile['g_amplification'].astype(T.config.floatX), name="g_amplification", borrow=False) else: - self.logger.warn("no g_amplification in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_amplification in file, falling back to defaults") # pragma: no cover if 'g_min' in datafile: self.g_min = theano.shared(value=datafile['g_min'].astype(T.config.floatX), name="g_min", borrow=False) else: - self.logger.warn("no g_min in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_min in file, falling back to defaults") # pragma: no cover if 'g_max' in datafile: self.g_max = theano.shared(value=datafile['g_max'].astype(T.config.floatX), name="g_max", borrow=False) else: - self.logger.warn("no g_max in file, falling 
back to defaults") # pragma: no cover + self.logger.warning("no g_max in file, falling back to defaults") # pragma: no cover if 'g_function_selector' in datafile: self.g_function_selector = theano.shared(value=datafile['g_function_selector'], name="gatefunction", borrow=False) else: - self.logger.warn("no g_function_selector in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_function_selector in file, falling back to defaults") # pragma: no cover if 'g_expect' in datafile: self.g_expect = theano.shared(value=datafile['g_expect'], name="expectation", borrow=False) else: - self.logger.warn("no g_expect in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_expect in file, falling back to defaults") # pragma: no cover if 'g_countdown' in datafile: self.g_countdown = theano.shared(value=datafile['g_countdown'], name="countdown", borrow=False) else: - self.logger.warn("no g_countdown in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_countdown in file, falling back to defaults") # pragma: no cover if 'g_wait' in datafile: self.g_wait = theano.shared(value=datafile['g_wait'], name="wait", borrow=False) else: - self.logger.warn("no g_wait in file, falling back to defaults") # pragma: no cover + self.logger.warning("no g_wait in file, falling back to defaults") # pragma: no cover if 'n_function_selector' in datafile: self.n_function_selector = theano.shared(value=datafile['n_function_selector'], name="nodefunction_per_gate", borrow=False) else: - self.logger.warn("no n_function_selector in file, falling back to defaults") # pragma: no cover + self.logger.warning("no n_function_selector in file, falling back to defaults") # pragma: no cover # reconstruct other states self.por_ret_dirty = True @@ -987,17 +1010,33 @@ def load_data(self, datafilename, nodes_data): self.has_sampling_activators = np.sum(self.allocated_nodespaces_sampling_activators) > 0 self.has_gatefunction_absolute = 
GATE_FUNCTION_ABSOLUTE in g_function_selector self.has_gatefunction_sigmoid = GATE_FUNCTION_SIGMOID in g_function_selector - self.has_gatefunction_tanh = GATE_FUNCTION_TANH in g_function_selector - self.has_gatefunction_rect = GATE_FUNCTION_RECT in g_function_selector + self.has_gatefunction_relu = GATE_FUNCTION_RELU in g_function_selector self.has_gatefunction_one_over_x = GATE_FUNCTION_DIST in g_function_selector + self.has_gatefunction_elu = GATE_FUNCTION_ELU in g_function_selector + self.has_gatefunction_threshold = GATE_FUNCTION_THRESHOLD in g_function_selector else: - self.logger.warn("no g_function_selector in file, falling back to defaults") + self.logger.warning("no g_function_selector in file, falling back to defaults") + + for uid in invalid_uids: + if self.nodenet.get_partition(uid) == self: + w_matrix = self.w.get_value() + id = node_from_id(uid) + self.allocated_nodes[id] = 0 + self.allocated_node_parents[id] = 0 + els = self.allocated_elements_to_nodes[np.where(self.allocated_elements_to_nodes == id)] + w_matrix[els] = 0 + self.allocated_elements_to_nodes[np.where(self.allocated_elements_to_nodes == id)] = 0 + self.w.set_value(w_matrix) for id in np.nonzero(self.allocated_nodes)[0]: if self.allocated_nodes[id] > MAX_STD_NODETYPE: uid = node_to_id(id, self.pid) if uid in nodes_data: - self.allocated_nodes[id] = get_numerical_node_type(nodes_data[uid]['type'], self.nodenet.native_modules) + try: + self.allocated_nodes[id] = get_numerical_node_type(nodes_data[uid]['type'], self.nodenet.native_modules) + except ValueError: + self.allocated_nodes[id] = 0 + self.allocated_elements_to_nodes[np.where(self.allocated_elements_to_nodes == id)] = 0 if self.allocated_nodes[id] > MAX_STD_NODETYPE: self.native_module_instances[uid] = self.nodenet.get_node(uid) elif self.allocated_nodes[id] == COMMENT: @@ -1023,54 +1062,23 @@ def load_data(self, datafilename, nodes_data): if self.has_directional_activators or self.__has_sampling_activators: 
self.__calculate_g_factors() - def load_inlinks(self, datafilename): - datafile = None - if os.path.isfile(datafilename): - try: - datafile = np.load(datafilename) - except ValueError: # pragma: no cover - self.logger.warn("Could not read nodenet data from file %s" % datafile) - return False - except IOError: # pragma: no cover - self.logger.warn("Could not open nodenet file %s" % datafile) - return False - - if not datafile: - return - - if 'inlink_pids' in datafile and \ - 'inlink_from_lengths' in datafile and \ - 'inlink_to_lengths' in datafile and \ - 'inlink_from_elements' in datafile and \ - 'inlink_to_elements' in datafile and \ - 'inlink_weights' in datafile: + def load_inlinks(self): + base_path = self.nodenet.persistency_path + for spid in self.nodenet.partitions: + filename = os.path.join(base_path, "inlinks-%s-from-%s.npz" % (self.spid, spid)) + if os.path.isfile(filename): + datafile = np.load(filename) - inlink_pids = datafile['inlink_pids'] - inlink_from_lengths = datafile['inlink_from_lengths'] - inlink_to_lengths = datafile['inlink_to_lengths'] - - inlink_from_offset = 0 - inlink_to_offset = 0 - weight_offset = 0 - - for i, pid in enumerate(inlink_pids): - - inlink_from_elements = datafile['inlink_from_elements'][inlink_from_offset:inlink_from_offset+inlink_from_lengths[i]] - inlink_to_elements = datafile['inlink_to_elements'][inlink_to_offset:inlink_to_offset+inlink_to_lengths[i]] - inlink_weights = datafile['inlink_weights'][weight_offset:weight_offset+(inlink_from_lengths[i]*inlink_to_lengths[i])] + if str(datafile['inlink_type']) == 'identity': + weights = 1 + else: + weights = datafile['weights'] self.set_inlink_weights( - "%03i" % pid, - inlink_from_elements.astype(np.int32), - inlink_to_elements.astype(np.int32), - np.reshape(inlink_weights, (inlink_to_lengths[i], inlink_from_lengths[i])) - ) - - weight_offset += inlink_from_lengths[i]*inlink_to_lengths[i] - inlink_from_offset += inlink_from_lengths[i] - inlink_to_offset += 
inlink_to_lengths[i] - else: - self.logger.warn("no or incomplete inlink information in file, no inter-partition links will be loaded") # pragma: no cover + str(datafile['from_partition_id']), + datafile['from_ids'], + datafile['to_ids'], + weights) def grow_number_of_nodespaces(self, growby): @@ -1153,12 +1161,12 @@ def grow_number_of_elements(self, growby): new_a_prev[0:self.NoE] = self.a_prev.get_value(borrow=True) self.a_prev.set_value(new_a_prev, borrow=True) - new_g_theta = np.zeros(new_NoE, dtype=self.nodenet.numpyfloatX) - new_g_theta[0:self.NoE] = self.g_theta.get_value(borrow=True) - self.g_theta.set_value(new_g_theta, borrow=True) + new_g_bias = np.zeros(new_NoE, dtype=self.nodenet.numpyfloatX) + new_g_bias[0:self.NoE] = self.g_bias.get_value(borrow=True) + self.g_bias.set_value(new_g_bias, borrow=True) - new_g_theta_shifted = np.lib.stride_tricks.as_strided(new_g_theta, shape=(self.NoE, 7), strides=(self.nodenet.byte_per_float, self.nodenet.byte_per_float)) - self.g_theta_shifted.set_value(new_g_theta_shifted, borrow=True) + new_g_bias_shifted = np.lib.stride_tricks.as_strided(new_g_bias, shape=(self.NoE, 7), strides=(self.nodenet.byte_per_float, self.nodenet.byte_per_float)) + self.g_bias_shifted.set_value(new_g_bias_shifted, borrow=True) new_g_factor = np.ones(new_NoE, dtype=self.nodenet.numpyfloatX) new_g_factor[0:self.NoE] = self.g_factor.get_value(borrow=True) @@ -1177,7 +1185,7 @@ def grow_number_of_elements(self, growby): self.g_min.set_value(new_g_min, borrow=True) new_g_max = np.ones(new_NoE, dtype=self.nodenet.numpyfloatX) - new_g_max[0:self.NoE] = self.g_max.get_value(borrow=True) + new_g_max[0:self.NoE] = self.g_max.get_value(borrow=True) self.g_max.set_value(new_g_max, borrow=True) new_g_function_selector = np.zeros(new_NoE, dtype=np.int8) @@ -1230,7 +1238,7 @@ def announce_nodes(self, number_of_nodes, average_elements_per_node): self.logger.info("Per announcement in partition %i, growing elements vectors by %d elements" % (self.pid, 
growby)) self.grow_number_of_elements(gap + (gap //3)) - def create_node(self, nodetype, nodespace_id, id=None, parameters=None, gate_parameters=None, gate_functions=None): + def create_node(self, nodetype, nodespace_id, id=None, parameters=None, gate_configuration=None): # find a free ID / index in the allocated_nodes vector to hold the node type if id is None: @@ -1298,11 +1306,14 @@ def create_node(self, nodetype, nodespace_id, id=None, parameters=None, gate_par # due to the order of initializing, nodespaces might just not be here yet. self.nodespaces_contents_last_changed[nodespace_id] = self.nodenet.current_step - for element in range (0, get_elements_per_type(self.allocated_nodes[id], self.nodenet.native_modules)): - self.allocated_elements_to_nodes[offset + element] = id + if number_of_elements > 0: + elrange = np.asarray(range(offset, offset + number_of_elements)) + self.allocated_elements_to_nodes[elrange] = id if parameters is None: parameters = {} + if gate_configuration is None: + gate_configuration = {} nto = self.nodenet.get_nodetype(nodetype) @@ -1331,7 +1342,7 @@ def create_node(self, nodetype, nodespace_id, id=None, parameters=None, gate_par self.allocated_node_offsets[self.allocated_nodespaces_exp_activators[nodespace_id]] if nto.parameter_defaults.get('expectation'): - value = nto.parameter_defaults['expectation'] + value = float(parameters.get('expectation', nto.parameter_defaults['expectation'])) g_expect_array = self.g_expect.get_value(borrow=True) g_expect_array[offset + GEN] = float(value) g_expect_array[offset + SUR] = float(value) @@ -1339,7 +1350,7 @@ def create_node(self, nodetype, nodespace_id, id=None, parameters=None, gate_par self.g_expect.set_value(g_expect_array, borrow=True) if nto.parameter_defaults.get('wait'): - value = nto.parameter_defaults['wait'] + value = int(parameters.get('wait', nto.parameter_defaults['wait'])) g_wait_array = self.g_wait.get_value(borrow=True) g_wait_array[offset + SUR] = int(min(value, 128)) 
g_wait_array[offset + POR] = int(min(value, 128)) @@ -1385,24 +1396,14 @@ def create_node(self, nodetype, nodespace_id, id=None, parameters=None, gate_par for key in self.nodenet.get_standard_nodetype_definitions()[nodetype]['parameters']: node_proxy.set_parameter(key, parameters.get(key, '')) - for gate, parameters in nto.gate_defaults.items(): - if gate in nto.gatetypes: - for gate_parameter in parameters: - self.set_node_gate_parameter(id, gate, gate_parameter, parameters[gate_parameter]) - if gate_parameters is not None: - for gate, parameters in gate_parameters.items(): - if gate in nto.gatetypes: - for gate_parameter in parameters: - self.set_node_gate_parameter(id, gate, gate_parameter, parameters[gate_parameter]) - - if gate_functions is not None: - for gate, gate_function in gate_functions.items(): - if gate in nto.gatetypes: - self.set_node_gatefunction_name(id, gate, gate_function) + for gate, conf in gate_configuration.items(): + idx = offset + get_numerical_gate_type(gate) + for param, value in conf['gatefunction_parameters'].items(): + self._set_gate_config_for_elements([idx], conf['gatefunction'], param, [value]) # initialize activation to zero a_array = self.a.get_value(borrow=True) - for element in range (0, get_elements_per_type(get_numerical_node_type(nodetype, self.nodenet.native_modules), self.nodenet.native_modules)): + for element in range(0, get_elements_per_type(get_numerical_node_type(nodetype, self.nodenet.native_modules), self.nodenet.native_modules)): a_array[offset + element] = 0 self.a.set_value(a_array) @@ -1423,19 +1424,22 @@ def delete_node(self, node_id): self.allocated_node_offsets[node_id] = 0 self.allocated_node_parents[node_id] = 0 g_function_selector_array = self.g_function_selector.get_value(borrow=True) - for element in range (0, get_elements_per_type(type, self.nodenet.native_modules)): + + element = 0 + while self.allocated_elements_to_nodes[offset + element] == node_id: self.allocated_elements_to_nodes[offset + element] 
= 0 g_function_selector_array[offset + element] = 0 + element += 1 + self.g_function_selector.set_value(g_function_selector_array, borrow=True) - self.allocated_elements_to_nodes[np.where(self.allocated_elements_to_nodes == node_id)[0]] = 0 if type == SENSOR: - sensor_index = np.where(self.sensor_indices == node_id)[0] - self.sensor_indices[sensor_index] = 0 + sensor_index = np.where(self.sensor_indices == offset)[0] + self.sensor_indices[sensor_index] = -1 if type == ACTUATOR: - actuator_index = np.where(self.actuator_indices == node_id)[0] - self.actuator_indices[actuator_index] = 0 + actuator_index = np.where(self.actuator_indices == offset)[0] + self.actuator_indices[actuator_index] = -1 if type == PIPE: n_function_selector_array = self.n_function_selector.get_value(borrow=True) @@ -1571,55 +1575,6 @@ def delete_nodespace(self, nodespace_id): self.nodenet._track_deletion('nodespaces', nodespace_to_id(nodespace_id, self.pid)) self.nodespaces_contents_last_changed[self.allocated_nodespaces[nodespace_id]] = self.nodenet.current_step - def set_node_gate_parameter(self, id, gate_type, parameter, value): - numerical_node_type = self.allocated_nodes[id] - nodetype = None - if numerical_node_type > MAX_STD_NODETYPE: - nodetype = self.nodenet.get_nodetype(get_string_node_type(numerical_node_type, self.nodenet.native_modules)) - - elementindex = self.allocated_node_offsets[id] + get_numerical_gate_type(gate_type, nodetype) - if parameter == 'threshold': - g_threshold_array = self.g_threshold.get_value(borrow=True) - g_threshold_array[elementindex] = value - self.g_threshold.set_value(g_threshold_array, borrow=True) - elif parameter == 'amplification': - g_amplification_array = self.g_amplification.get_value(borrow=True) - g_amplification_array[elementindex] = value - self.g_amplification.set_value(g_amplification_array, borrow=True) - elif parameter == 'minimum': - g_min_array = self.g_min.get_value(borrow=True) - g_min_array[elementindex] = value - 
self.g_min.set_value(g_min_array, borrow=True) - elif parameter == 'maximum': - g_max_array = self.g_max.get_value(borrow=True) - g_max_array[elementindex] = value - self.g_max.set_value(g_max_array, borrow=True) - elif parameter == 'theta': - g_theta_array = self.g_theta.get_value(borrow=True) - g_theta_array[elementindex] = value - self.g_theta.set_value(g_theta_array, borrow=True) - - def set_node_gatefunction_name(self, id, gate_type, gatefunction_name): - numerical_node_type = self.allocated_nodes[id] - nodetype = None - if numerical_node_type > MAX_STD_NODETYPE: - nodetype = self.nodenet.get_nodetype(get_string_node_type(numerical_node_type, self.nodenet.native_modules)) - - elementindex = self.allocated_node_offsets[id] + get_numerical_gate_type(gate_type, nodetype) - g_function_selector = self.g_function_selector.get_value(borrow=True) - g_function_selector[elementindex] = get_numerical_gatefunction_type(gatefunction_name) - self.g_function_selector.set_value(g_function_selector, borrow=True) - if g_function_selector[elementindex] == GATE_FUNCTION_ABSOLUTE: - self.has_gatefunction_absolute = True - elif g_function_selector[elementindex] == GATE_FUNCTION_SIGMOID: - self.has_gatefunction_sigmoid = True - elif g_function_selector[elementindex] == GATE_FUNCTION_TANH: - self.has_gatefunction_tanh = True - elif g_function_selector[elementindex] == GATE_FUNCTION_RECT: - self.has_gatefunction_rect = True - elif g_function_selector[elementindex] == GATE_FUNCTION_DIST: - self.has_gatefunction_one_over_x = True - def set_nodespace_gatetype_activator(self, nodespace_id, gate_type, activator_id): if gate_type == "por": self.allocated_nodespaces_por_activators[nodespace_id] = activator_id @@ -1725,6 +1680,24 @@ def group_nodes_by_ids(self, nodespace_uid, ids, group_name, gatetype="gen"): gate = get_numerical_gate_type(gatetype) self.nodegroups[nodespace_uid][group_name] = self.allocated_node_offsets[ids] + gate + def group_highdimensional_elements(self, node_uid, 
gate=None, slot=None, group_name=None): + node_id = node_from_id(node_uid) + nodespace_id = self.allocated_node_parents[node_id] + nodespace_uid = nodespace_to_id(nodespace_id, self.pid) + if nodespace_uid not in self.nodegroups: + self.nodegroups[nodespace_uid] = {} + strnodetype = get_string_node_type(self.allocated_nodes[node_id], self.nodenet.native_modules) + nodetype = self.nodenet.get_nodetype(strnodetype) + if gate: + element = get_numerical_gate_type("%s0" % gate, nodetype) + dimensionality = nodetype.get_gate_dimensionality(gate) + elif slot: + element = get_numerical_slot_type("%s0" % slot, nodetype) + dimensionality = nodetype.get_slot_dimensionality(slot) + start = self.allocated_node_offsets[node_id] + element + stop = start + dimensionality + self.nodegroups[nodespace_uid][group_name] = np.arange(start, stop) + def ungroup_nodes(self, nodespace_uid, group): if nodespace_uid in self.nodegroups and group in self.nodegroups[nodespace_uid]: del self.nodegroups[nodespace_uid][group] @@ -1742,18 +1715,88 @@ def set_activations(self, nodespace_uid, group, new_activations): a_array[self.nodegroups[nodespace_uid][group]] = new_activations self.a.set_value(a_array, borrow=True) - def get_thetas(self, nodespace_uid, group): + def get_gate_configurations(self, nodespace_uid, group, gatefunction_parameter=None): if nodespace_uid not in self.nodegroups or group not in self.nodegroups[nodespace_uid]: raise ValueError("Group %s does not exist in nodespace %s." 
% (group, nodespace_uid)) - g_theta_array = self.g_theta.get_value(borrow=True) - return g_theta_array[self.nodegroups[nodespace_uid][group]] - def set_thetas(self, nodespace_uid, group, thetas): + groupindexes = self.nodegroups[nodespace_uid][group] + g_function_selector = self.g_function_selector.get_value(borrow=True) + num_gatefunc = g_function_selector[groupindexes] + if len(np.unique(num_gatefunc)) > 1: + raise ValueError("Heterogeneous gatefunction configuration") + data = {'gatefunction': get_string_gatefunction_type(np.unique(num_gatefunc)[0])} + if gatefunction_parameter == 'bias': + g_bias = self.g_bias.get_value(borrow=True) + data['parameter_values'] = g_bias[groupindexes] + if gatefunction_parameter == 'minimum': + g_min = self.g_min.get_value(borrow=True) + data['parameter_values'] = g_min[groupindexes] + if gatefunction_parameter == 'maximum': + g_max = self.g_max.get_value(borrow=True) + data['parameter_values'] = g_max[groupindexes] + if gatefunction_parameter == 'amplification': + g_amplification = self.g_amplification.get_value(borrow=True) + data['parameter_values'] = g_amplification[groupindexes] + if gatefunction_parameter == 'threshold': + g_threshold = self.g_threshold.get_value(borrow=True) + data['parameter_values'] = g_threshold[groupindexes] + return data + + def set_gate_configurations(self, nodespace_uid, group, gatefunction, gatefunction_parameter=None, parameter_values=None): if nodespace_uid not in self.nodegroups or group not in self.nodegroups[nodespace_uid]: raise ValueError("Group %s does not exist in nodespace %s."
% (group, nodespace_uid)) - g_theta_array = self.g_theta.get_value(borrow=True) - g_theta_array[self.nodegroups[nodespace_uid][group]] = thetas - self.g_theta.set_value(g_theta_array, borrow=True) + + groupindexes = self.nodegroups[nodespace_uid][group] + self._set_gate_config_for_elements(groupindexes, gatefunction, gatefunction_parameter, parameter_values) + + def _set_gate_config_for_elements(self, elements, gatefunction, gatefunction_parameter=None, parameter_values=None): + g_function_selector = self.g_function_selector.get_value(borrow=True) + g_bias = self.g_bias.get_value(borrow=True) + g_threshold = self.g_threshold.get_value(borrow=True) + g_amplification = self.g_amplification.get_value(borrow=True) + g_min = self.g_min.get_value(borrow=True) + g_max = self.g_max.get_value(borrow=True) + + # set gatefunction + num_gatefunc = get_numerical_gatefunction_type(gatefunction) + g_function_selector[elements] = num_gatefunc + self.g_function_selector.set_value(g_function_selector, borrow=True) + + # first, unset any old values + g_bias[elements] = 0 + if num_gatefunc != GATE_FUNCTION_THRESHOLD: + g_threshold[elements] = 0 + g_amplification[elements] = 1 + g_min[elements] = 0 + g_max[elements] = 1 + + if num_gatefunc == GATE_FUNCTION_SIGMOID or num_gatefunc == GATE_FUNCTION_ELU or num_gatefunc == GATE_FUNCTION_RELU: + if gatefunction_parameter == 'bias': + g_bias[elements] = parameter_values + if num_gatefunc == GATE_FUNCTION_ELU: + self.has_gatefunction_elu = True + elif num_gatefunc == GATE_FUNCTION_RELU: + self.has_gatefunction_relu = True + elif num_gatefunc == GATE_FUNCTION_SIGMOID: + self.has_gatefunction_sigmoid = True + + elif num_gatefunc == GATE_FUNCTION_THRESHOLD: + self.has_gatefunction_threshold = True + if gatefunction_parameter == 'threshold': + g_threshold[elements] = parameter_values + if gatefunction_parameter == 'amplification': + g_amplification[elements] = parameter_values + if gatefunction_parameter == 'minimum': + g_min[elements] = 
parameter_values + if gatefunction_parameter == 'maximum': + g_max[elements] = parameter_values + + self.g_function_selector.set_value(g_function_selector, borrow=True) + self.g_bias.set_value(g_bias, borrow=True) + self.g_threshold.set_value(g_threshold, borrow=True) + self.g_amplification.set_value(g_amplification, borrow=True) + self.g_min.set_value(g_min, borrow=True) + self.g_max.set_value(g_max, borrow=True) def get_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, group_to): if nodespace_from_uid not in self.nodegroups or group_from not in self.nodegroups[nodespace_from_uid]: @@ -1777,9 +1820,13 @@ def set_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro #if len(self.nodegroups[nodespace_to_uid][group_to]) != new_w.shape[0]: # raise ValueError("group_to %s has length %i, but new_w.shape[0] is %i" % (group_to, len(self.nodegroups[nodespace_to_uid][group_to]), new_w.shape[0])) - w_matrix = self.w.get_value(borrow=True) grp_from = self.nodegroups[nodespace_from_uid][group_from] grp_to = self.nodegroups[nodespace_to_uid][group_to] + if np.isscalar(new_w) and new_w == 1: + if len(grp_from) != len(grp_to): + raise ValueError("from_elements and to_elements need to have equal lengths for identity links") + new_w = np.eye(len(grp_from)) + w_matrix = self.w.get_value(borrow=True) cols, rows = np.meshgrid(grp_from, grp_to) w_matrix[rows, cols] = new_w self.w.set_value(w_matrix, borrow=True) @@ -1793,50 +1840,66 @@ def set_link_weights(self, nodespace_from_uid, group_from, nodespace_to_uid, gro self.por_ret_dirty = self.has_pipes def set_inlink_weights(self, partition_from_spid, new_from_elements, new_to_elements, new_weights): + + inlink_type = None + from_partition = self.nodenet.partitions[partition_from_spid] if partition_from_spid in self.inlinks: + inlink_type = self.inlinks[partition_from_spid][4] + if inlink_type != "dense": + raise NotImplementedError("Update of non-dense partition connections not yet implemented: 
"+inlink_type) + theano_from_elements = self.inlinks[partition_from_spid][0] theano_to_elements = self.inlinks[partition_from_spid][1] theano_weights = self.inlinks[partition_from_spid][2] + old_from_elements = theano_from_elements.get_value(borrow=True) old_to_elements = theano_to_elements.get_value(borrow=True) old_weights = theano_weights.get_value(borrow=True) propagation_function = self.inlinks[partition_from_spid][3] - else: - old_from_elements = np.zeros(0, dtype=np.int32) - old_to_elements = np.zeros(0, dtype=np.int32) - old_weights = np.eye(0, dtype=T.config.floatX) + from_elements = np.union1d(old_from_elements, new_from_elements) + to_elements = np.union1d(old_to_elements, new_to_elements) + weights = np.zeros((len(to_elements), len(from_elements)), dtype=T.config.floatX) + + old_from_indices = np.searchsorted(from_elements, old_from_elements) + old_to_indices = np.searchsorted(to_elements, old_to_elements) + oldcols, oldrows = np.meshgrid(old_from_indices, old_to_indices) + weights[oldrows, oldcols] = old_weights + + new_from_indices = np.searchsorted(from_elements, new_from_elements) + new_to_indices = np.searchsorted(to_elements, new_to_elements) + newcols, newrows = np.meshgrid(new_from_indices, new_to_indices) + weights[newrows, newcols] = new_weights + + theano_from_elements.set_value(from_elements, borrow=True) + theano_to_elements.set_value(to_elements, borrow=True) + theano_weights.set_value(weights, borrow=True) + + else: weightsname = "w_%s_%s" % (partition_from_spid, self.spid) fromname = "in_from_%s_%s" % (partition_from_spid, self.spid) toname = "in_to_%s_%s" % (partition_from_spid, self.spid) - theano_from_elements = theano.shared(value=old_from_elements, name=fromname, borrow=True) - theano_to_elements = theano.shared(value=old_to_elements, name=toname, borrow=True) - theano_weights = theano.shared(value=old_weights.astype(T.config.floatX), name=weightsname, borrow=True) - - propagation_function = self.get_compiled_propagate_inlinks( - 
from_partition, - theano_from_elements, - theano_to_elements, - theano_weights) - - from_elements = np.union1d(old_from_elements, new_from_elements) - to_elements = np.union1d(old_to_elements, new_to_elements) - weights = np.zeros((len(to_elements), len(from_elements)), dtype=T.config.floatX) - - old_from_indices = np.searchsorted(from_elements, old_from_elements) - old_to_indices = np.searchsorted(to_elements, old_to_elements) - oldcols, oldrows = np.meshgrid(old_from_indices, old_to_indices) - weights[oldrows, oldcols] = old_weights - - new_from_indices = np.searchsorted(from_elements, new_from_elements) - new_to_indices = np.searchsorted(to_elements, new_to_elements) - newcols, newrows = np.meshgrid(new_from_indices, new_to_indices) - weights[newrows, newcols] = new_weights - - theano_from_elements.set_value(from_elements, borrow=True) - theano_to_elements.set_value(to_elements, borrow=True) - theano_weights.set_value(weights, borrow=True) + theano_from_elements = theano.shared(value=new_from_elements, name=fromname, borrow=True) + theano_to_elements = theano.shared(value=new_to_elements, name=toname, borrow=True) + + if np.isscalar(new_weights) and new_weights == 1: + if len(new_from_elements) != len(new_to_elements): + raise ValueError("from_elements and to_elements need to have equal lengths for identity links") + inlink_type = "identity" + theano_weights = None + propagation_function = self.get_compiled_propagate_identity_inlinks( + from_partition, + theano_from_elements, + theano_to_elements) + else: + inlink_type = "dense" + theano_weights = theano.shared(value=new_weights.astype(T.config.floatX), name=weightsname, borrow=True) + propagation_function = self.get_compiled_propagate_inlinks( + from_partition, + theano_from_elements, + theano_to_elements, + theano_weights) for id in from_partition.allocated_elements_to_nodes[theano_from_elements.get_value()]: from_partition.nodes_last_changed[id] = self.nodenet.current_step @@ -1849,7 +1912,8 @@ def 
set_inlink_weights(self, partition_from_spid, new_from_elements, new_to_elem theano_from_elements, theano_to_elements, theano_weights, - propagation_function) + propagation_function, + inlink_type) def has_nodespace_changes(self, nodespace_uid, since_step): ns_id = nodespace_from_id(nodespace_uid) @@ -1863,73 +1927,73 @@ def get_nodespace_changes(self, nodespace_uid, since_step): nodespace_ids = nodespace_ids[np.where(self.allocated_nodespaces[nodespace_ids] == ns_id)[0]] return node_ids, nodespace_ids - def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_links=True, include_followupnodes=True): - + def get_node_data(self, ids=None, nodespaces_by_partition=None, complete=False, include_links=True, linked_nodespaces_by_partition={}): a = self.a.get_value(borrow=True) - g_threshold_array = self.g_threshold.get_value(borrow=True) - g_amplification_array = self.g_amplification.get_value(borrow=True) - g_min_array = self.g_min.get_value(borrow=True) - g_max_array = self.g_max.get_value(borrow=True) - g_theta = self.g_theta.get_value(borrow=True) + g_threshold = self.g_threshold.get_value(borrow=True) + g_amplification = self.g_amplification.get_value(borrow=True) + g_min = self.g_min.get_value(borrow=True) + g_max = self.g_max.get_value(borrow=True) + g_bias = self.g_bias.get_value(borrow=True) g_function_selector = self.g_function_selector.get_value(borrow=True) w = self.w.get_value(borrow=True) - if nodespace_ids is not None: - node_ids = np.where(self.allocated_node_parents == nodespace_ids)[0] + if nodespaces_by_partition is not None: + fetchall = False + node_ids = np.where(self.allocated_node_parents == nodespaces_by_partition[self.spid])[0] else: + fetchall = True node_ids = np.nonzero(self.allocated_nodes)[0] if ids is not None: + fetchall = False node_ids = np.intersect1d(node_ids, ids) + if len(node_ids) and linked_nodespaces_by_partition == {}: + linked_nodespaces_by_partition[self.spid] = self.allocated_node_parents[node_ids] nodes 
= {} - followupuids = set() + node_numpy_data = {} + highdim_nodes = [] + additional_links = [] + for id in node_ids: uid = node_to_id(id, self.pid) strtype = get_string_node_type(self.allocated_nodes[id], self.nodenet.native_modules) nodetype = self.nodenet.get_nodetype(strtype) - gate_functions = {} - gate_parameters = {} + gate_configurations = {} gate_activations = {} - links = {} - for gate in self.nodenet.get_nodetype(strtype).gatetypes: - numericalgate = get_numerical_gate_type(gate, self.nodenet.get_nodetype(strtype)) - element = self.allocated_node_offsets[id] + numericalgate - gate_functions[gate] = get_string_gatefunction_type(g_function_selector[element]) - - parameters = {} - threshold = g_threshold_array[element].item() - if 'threshold' not in nodetype.gate_defaults[gate] or threshold != nodetype.gate_defaults[gate]['threshold']: - parameters['threshold'] = float(threshold) - amplification = g_amplification_array[element].item() - if 'amplification' not in nodetype.gate_defaults[gate] or amplification != nodetype.gate_defaults[gate]['amplification']: - parameters['amplification'] = float(amplification) - - minimum = g_min_array[element].item() - if 'minimum' not in nodetype.gate_defaults[gate] or minimum != nodetype.gate_defaults[gate]['minimum']: - parameters['minimum'] = float(minimum) - - maximum = g_max_array[element].item() - if 'maximum' not in nodetype.gate_defaults[gate] or maximum != nodetype.gate_defaults[gate]['maximum']: - parameters['maximum'] = float(maximum) - - theta = g_theta[element].item() - if 'theta' not in nodetype.gate_defaults[gate] or theta != nodetype.gate_defaults[gate]['theta']: - parameters['theta'] = float(theta) - - if not len(parameters) == 0: - gate_parameters[gate] = parameters + if type(nodetype) == HighdimensionalNodetype: + gates = nodetype.gategroups + highdim_nodes.append(uid) + else: + gates = nodetype.gatetypes - gate_activations[gate] = {"default": { - "name": "default", - "uid": "default", - "activation": 
float(a[element])}} + for gate in gates: + numericalgate = get_numerical_gate_type(gate, nodetype) + element = self.allocated_node_offsets[id] + numericalgate + num_gatefunc = g_function_selector[element] + if num_gatefunc != GATE_FUNCTION_IDENTITY: + gate_configurations[gate] = { + 'gatefunction': get_string_gatefunction_type(num_gatefunc), + 'gatefunction_parameters': {} + } + if num_gatefunc == GATE_FUNCTION_SIGMOID or num_gatefunc == GATE_FUNCTION_ELU or num_gatefunc == GATE_FUNCTION_RELU: + gate_configurations[gate]['gatefunction_parameters'] = {'bias': float(g_bias[element])} + elif num_gatefunc == GATE_FUNCTION_THRESHOLD: + gate_configurations[gate]['gatefunction_parameters'] = { + 'minimum': float(g_min[element]), + 'maximum': float(g_max[element]), + 'threshold': float(g_threshold[element]), + 'amplification': float(g_amplification[element]) + } + + gate_activations[gate] = float(a[element]) state = None if uid in self.native_module_instances: - state = self.native_module_instances.get(uid).clone_state() + state, numpy_state = self.native_module_instances[uid].get_persistable_state() + node_numpy_data[uid] = numpy_state parameters = {} if strtype == "Sensor": @@ -1939,7 +2003,7 @@ def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_li parameters['datasource'] = None else: parameters['datasource'] = self.nodenet.get_datasources()[datasource_index[0]] - elif strtype == "Actor": + elif strtype == "Actuator": actuator_element = self.allocated_node_offsets[id] + GEN datatarget_index = np.where(self.actuator_indices == actuator_element)[0] if len(datatarget_index) == 0: @@ -1970,9 +2034,9 @@ def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_li g_wait_array = self.g_wait.get_value(borrow=True) parameters['wait'] = g_wait_array[self.allocated_node_offsets[id] + get_numerical_gate_type("sur")].item() elif strtype == "Comment": - parameters = self.comment_instances.get(uid).clone_parameters() + parameters = 
self.comment_instances[uid].clone_parameters() elif strtype in self.nodenet.native_modules: - parameters = self.native_module_instances.get(uid).clone_parameters() + parameters = self.native_module_instances[uid].clone_parameters() data = {"uid": uid, "name": self.nodenet.names.get(uid, uid), @@ -1981,17 +2045,18 @@ def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_li "type": strtype, "parameters": parameters, "state": state, - "gate_parameters": gate_parameters, - "sheaves": {"default": {"name": "default", - "uid": "default", - "activation": float(a[self.allocated_node_offsets[id] + GEN])}}, "activation": float(a[self.allocated_node_offsets[id] + GEN]), "gate_activations": gate_activations, - "gate_functions": gate_functions} + "gate_configuration": gate_configurations, + "is_highdimensional": type(nodetype) == HighdimensionalNodetype} + if type(nodetype) == FlowNodetype: + data.update(self.nodenet.flow_module_instances[uid].get_flow_data()) if complete: - data['index'] = id + data['index'] = int(id) if include_links: data['links'] = {} + data['outlinks'] = 0 + data['inlinks'] = 0 nodes[uid] = data @@ -2001,38 +2066,85 @@ def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_li for index, gate_index in enumerate(gates): source_id = self.allocated_elements_to_nodes[gate_index] source_uid = node_to_id(source_id, self.pid) - if source_uid not in nodes: - continue + slot_index = slots[index] + target_id = self.allocated_elements_to_nodes[slot_index] + target_uid = node_to_id(target_id, self.pid) + + if not fetchall: + if source_uid not in nodes and target_uid in nodes: + if self.allocated_node_parents[source_id] not in linked_nodespaces_by_partition.get(self.spid, []): + nodes[target_uid]['inlinks'] += 1 + continue + elif target_uid not in nodes and source_uid in nodes: + if self.allocated_node_parents[target_id] not in linked_nodespaces_by_partition.get(self.spid, []): + nodes[source_uid]['outlinks'] += 1 + continue 
+ elif source_uid not in nodes or target_uid not in nodes: + # links between two nodes outside this nodespace. + continue source_type = self.allocated_nodes[source_id] source_nodetype = self.nodenet.get_nodetype(get_string_node_type(source_type, self.nodenet.native_modules)) source_gate_numerical = gate_index - self.allocated_node_offsets[source_id] source_gate_type = get_string_gate_type(source_gate_numerical, source_nodetype) + if source_uid in highdim_nodes: + if source_gate_type.rstrip('0123456789') in source_nodetype.dimensionality['gates']: + source_gate_type = source_gate_type.rstrip('0123456789') + '0' - slot_index = slots[index] - target_id = self.allocated_elements_to_nodes[slot_index] - target_uid = node_to_id(target_id, self.pid) target_type = self.allocated_nodes[target_id] target_nodetype = self.nodenet.get_nodetype(get_string_node_type(target_type, self.nodenet.native_modules)) target_slot_numerical = slot_index - self.allocated_node_offsets[target_id] target_slot_type = get_string_slot_type(target_slot_numerical, target_nodetype) + if target_uid in highdim_nodes: + if target_slot_type.rstrip('0123456789') in target_nodetype.dimensionality['slots']: + target_slot_type = target_slot_type.rstrip('0123456789') + '0' linkdict = {"weight": float(w[slot_index, gate_index]), - "certainty": 1, "target_slot_name": target_slot_type, "target_node_uid": target_uid} - if source_gate_type not in nodes[source_uid]["links"]: - nodes[source_uid]["links"][source_gate_type] = [] - nodes[source_uid]["links"][source_gate_type].append(linkdict) - followupuids.add(target_uid) + + if source_uid in nodes: + if source_gate_type not in nodes[source_uid]["links"]: + nodes[source_uid]["links"][source_gate_type] = [] + if source_uid in highdim_nodes: + if linkdict not in nodes[source_uid]['links'][source_gate_type]: + nodes[source_uid]["links"][source_gate_type].append(linkdict) # Doik: why is this check needed? possibly expensive. 
/Doik + else: + nodes[source_uid]["links"][source_gate_type].append(linkdict) + elif target_uid in nodes: + linkdict['source_node_uid'] = source_uid + linkdict['source_gate_name'] = source_gate_type + additional_links.append(linkdict) # outgoing cross-partition links for partition_to_spid, to_partition in self.nodenet.partitions.items(): if self.spid in to_partition.inlinks: inlinks = to_partition.inlinks[self.spid] from_elements = inlinks[0].get_value(borrow=True) + + if not fetchall and partition_to_spid not in nodespaces_by_partition and linked_nodespaces_by_partition.get(partition_to_spid, []) == []: + nids = self.allocated_elements_to_nodes[from_elements] + if inlinks[4] == 'identity': + for nid in nids: + uid = node_to_id(nid, self.pid) + if uid in nodes: + nodes[uid]['outlinks'] += 1 + elif inlinks[4] == 'dense': + w = inlinks[2].get_value(borrow=True).transpose() + for idx, el in enumerate(from_elements): + uid = node_to_id(self.allocated_elements_to_nodes[el], self.pid) + if uid in nodes: + nodes[uid]['outlinks'] += np.count_nonzero(w[idx]) + continue + to_elements = inlinks[1].get_value(borrow=True) - w = inlinks[2].get_value(borrow=True) - slots, gates = np.nonzero(w) + inlink_type = inlinks[4] + if inlink_type == "dense": + w = inlinks[2].get_value(borrow=True) + slots, gates = np.nonzero(w) + elif inlink_type == "identity": + slots = np.arange(len(from_elements)) + gates = np.arange(len(from_elements)) + for index, gate_index in enumerate(gates): source_id = self.allocated_elements_to_nodes[from_elements[gate_index]] source_uid = node_to_id(source_id, self.pid) @@ -2043,6 +2155,9 @@ def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_li source_nodetype = self.nodenet.get_nodetype(get_string_node_type(source_type, self.nodenet.native_modules)) source_gate_numerical = from_elements[gate_index] - self.allocated_node_offsets[source_id] source_gate_type = get_string_gate_type(source_gate_numerical, source_nodetype) + if source_uid 
in highdim_nodes: + if source_gate_type.rstrip('0123456789') in source_nodetype.dimensionality['gates']: + source_gate_type = source_gate_type.rstrip('0123456789') + '0' slot_index = slots[index] target_id = to_partition.allocated_elements_to_nodes[to_elements[slot_index]] @@ -2051,36 +2166,95 @@ def get_node_data(self, ids=None, nodespace_ids=None, complete=False, include_li target_nodetype = to_partition.nodenet.get_nodetype(get_string_node_type(target_type, to_partition.nodenet.native_modules)) target_slot_numerical = to_elements[slot_index] - to_partition.allocated_node_offsets[target_id] target_slot_type = get_string_slot_type(target_slot_numerical, target_nodetype) - linkdict = {"weight": float(w[slot_index, gate_index]), - "certainty": 1, + if target_uid in highdim_nodes: + if target_slot_type.rstrip('0123456789') in target_nodetype.dimensionality['slots']: + target_slot_type = target_slot_type.rstrip('0123456789') + '0' + + if inlink_type == "dense": + weight = float(w[slot_index, gate_index]) + elif inlink_type == "identity": + weight = 1. + + linkdict = {"weight": weight, "target_slot_name": target_slot_type, "target_node_uid": target_uid} if source_gate_type not in nodes[source_uid]["links"]: nodes[source_uid]["links"][source_gate_type] = [] + if type(target_nodetype) == HighdimensionalNodetype: + target_slot_type = target_slot_type.rstrip('0123456789') + if target_slot_type.rstrip('0123456789') in target_nodetype.dimensionality['slots']: + target_slot_type = target_slot_type + '0' + nodes[source_uid]["links"][source_gate_type].append(linkdict) - followupuids.add(target_uid) - - # incoming cross-partition links need to be checked for followup nodes in the other partition - # even though we're not interested in the links themselves as they will be delivered with the nodes - # in the other partition. - # having to deliver followupnodes for links that aren't even our business is really annoying. 
- for from_partition_id, inlinks in self.inlinks.items(): - from_partition = self.nodenet.partitions[from_partition_id] - from_elements = inlinks[0].get_value(borrow=True) - to_elements = inlinks[1].get_value(borrow=True) - w = inlinks[2].get_value(borrow=True) - slots, gates = np.nonzero(w) - for index, gate_index in enumerate(gates): - source_id = from_partition.allocated_elements_to_nodes[from_elements[gate_index]] - source_uid = node_to_id(source_id, from_partition.pid) - - slot_index = slots[index] - target_id = self.allocated_elements_to_nodes[to_elements[slot_index]] - target_uid = node_to_id(target_id, self.pid) - if target_uid in nodes: - followupuids.add(source_uid) - - return nodes, followupuids + + # incoming cross-partition-links: + if not fetchall: + # incoming cross-partition links + for from_partition_id, inlinks in self.inlinks.items(): + if from_partition_id not in nodespaces_by_partition and linked_nodespaces_by_partition.get(from_partition_id, []) == []: + to_elements = inlinks[1].get_value(borrow=True) + nids = self.allocated_elements_to_nodes[to_elements] + if inlinks[4] == 'identity': + for nid in nids: + uid = node_to_id(nid, self.pid) + if uid in nodes: + nodes[uid]['inlinks'] += 1 + elif inlinks[4] == 'dense': + w = inlinks[2].get_value(borrow=True) + for idx, el in enumerate(to_elements): + uid = node_to_id(self.allocated_elements_to_nodes[el], self.pid) + if uid in nodes: + nodes[uid]['inlinks'] += np.count_nonzero(w[idx]) + else: + from_partition = self.nodenet.partitions[from_partition_id] + from_elements = inlinks[0].get_value(borrow=True) + to_elements = inlinks[1].get_value(borrow=True) + + inlink_type = inlinks[4] + if inlink_type == "dense": + w = inlinks[2].get_value(borrow=True) + slots, gates = np.nonzero(w) + elif inlink_type == "identity": + slots = np.arange(len(from_elements)) + gates = np.arange(len(from_elements)) + + for index, gate_index in enumerate(gates): + source_id = 
from_partition.allocated_elements_to_nodes[from_elements[gate_index]] + source_uid = node_to_id(source_id, from_partition.pid) + + source_type = from_partition.allocated_nodes[source_id] + source_nodetype = from_partition.nodenet.get_nodetype(get_string_node_type(source_type, from_partition.nodenet.native_modules)) + source_gate_numerical = from_elements[gate_index] - from_partition.allocated_node_offsets[source_id] + source_gate_type = get_string_gate_type(source_gate_numerical, source_nodetype) + + slot_index = slots[index] + target_id = self.allocated_elements_to_nodes[to_elements[slot_index]] + target_uid = node_to_id(target_id, self.pid) + + target_type = self.allocated_nodes[target_id] + target_nodetype = self.nodenet.get_nodetype(get_string_node_type(target_type, self.nodenet.native_modules)) + target_slot_numerical = to_elements[slot_index] - self.allocated_node_offsets[target_id] + target_slot_type = get_string_slot_type(target_slot_numerical, target_nodetype) + + if inlink_type == 'dense': + weight = float(w[slot_index][gate_index]) + elif inlink_type == 'identity': + weight = 1 + + if type(target_nodetype) == HighdimensionalNodetype: + if target_slot_type.rstrip('0123456789') in target_nodetype.dimensionality['slots']: + target_slot_type = target_slot_type.rstrip('0123456789') + '0' + if type(source_nodetype) == HighdimensionalNodetype: + if source_gate_type.rstrip('0123456789') in source_nodetype.dimensionality['gates']: + source_gate_type = source_gate_type.rstrip('0123456789') + '0' + + additional_links.append({"weight": weight, + "target_slot_name": target_slot_type, + "target_node_uid": target_uid, + "source_node_uid": source_uid, + "source_gate_name": source_gate_type}) + + return nodes, additional_links, node_numpy_data def integrity_check(self): diff --git a/micropsi_core/nodenet/theano_engine/theano_stepoperators.py b/micropsi_core/nodenet/theano_engine/theano_stepoperators.py index 56adbb5e..a6362e83 100644 --- 
a/micropsi_core/nodenet/theano_engine/theano_stepoperators.py +++ b/micropsi_core/nodenet/theano_engine/theano_stepoperators.py @@ -65,3 +65,91 @@ def execute(self, nodenet, nodes, netapi): partition.calculate() if nodenet.use_modulators: self.count_success_and_failure(nodenet) + + +class TheanoCalculateFlowmodules(Propagate): + + @property + def priority(self): + return 0 + + def __init__(self, nodenet): + self.nodenet = nodenet + + def value_guard(self, value, source, name): + if value is None: + return None + if self.nodenet.runner_config.get('runner_infguard'): + if type(value) == list: + for val in value: + self._guard(val, source, name) + else: + self._guard(value, source, name) + return value + + def _guard(self, value, source, name): + if np.isnan(np.sum(value)): + raise ValueError("NAN value in flow datected: %s" % self.format_error(source, name)) + elif np.isinf(np.sum(value)): + raise ValueError("INF value in flow datected: %s" % self.format_error(source, name)) + + def format_error(self, source, name): + if type(source) == dict: + if len(source['members']) == 1: + msg = "output %s of %s" % (name, source['members'][0]) + else: + msg = "output %s of graph %s" % (name, str(source['members'])) + else: + msg = "output %s of %s" % (name, str(source)) + return msg + + def execute(self, nodenet, nodes, netapi): + if not nodenet.flow_module_instances: + return + for uid, item in nodenet.flow_module_instances.items(): + item.is_part_of_active_graph = False + item.take_slot_activation_snapshot() + flowio = {} + if nodenet.worldadapter_instance: + if 'datasources' in nodenet.worldadapter_flow_nodes: + sourcenode = nodenet.get_node(nodenet.worldadapter_flow_nodes['datasources']) + flowio[sourcenode.uid] = {} + for key in sourcenode.outputs: + flowio[sourcenode.uid][key] = self.value_guard(nodenet.worldadapter_instance.get_flow_datasource(key), nodenet.worldadapter, key) + + for target_uid, target_name in sourcenode.outputmap[key]: + if target_uid == 
nodenet.worldadapter_flow_nodes.get('datatargets', False): + nodenet.worldadapter_instance.add_to_flow_datatarget(target_name, flowio[sourcenode.uid][key]) + + for func in nodenet.flowfunctions: + if any([node.is_requested() for node in func['endnodes']]): + skip = False + inputs = {} + for node_uid, in_name in func['inputs']: + if not nodenet.get_node(node_uid).inputmap[in_name]: + raise RuntimeError("Missing Flow-input %s of node %s" % (in_name, str(nodenet.get_node(node_uid)))) + source_uid, source_name = nodenet.get_node(node_uid).inputmap[in_name] + if flowio[source_uid][source_name] is None: + # netapi.logger.debug("Skipping graph bc. empty inputs") + skip = True + break + else: + inputs["%s_%s" % (node_uid, in_name)] = flowio[source_uid][source_name] + if skip: + for node_uid, out_name in func['outputs']: + if node_uid not in flowio: + flowio[node_uid] = {} + flowio[node_uid][out_name] = None + continue + out = func['callable'](**inputs) + for n in func['members']: + n.is_part_of_active_graph = True + for index, (node_uid, out_name) in enumerate(func['outputs']): + if node_uid not in flowio: + flowio[node_uid] = {} + if 'datatargets' in nodenet.worldadapter_flow_nodes: + targetnode = nodenet.get_node(nodenet.worldadapter_flow_nodes['datatargets']) + for uid, name in nodenet.get_node(node_uid).outputmap[out_name]: + if uid == targetnode.uid and node_uid != nodenet.worldadapter_flow_nodes.get('datasources', False): + nodenet.worldadapter_instance.add_to_flow_datatarget(name, out[index]) + flowio[node_uid][out_name] = self.value_guard(out[index], func, out_name) if out is not None else None diff --git a/micropsi_core/nodenet/vizapi.py b/micropsi_core/nodenet/vizapi.py deleted file mode 100644 index c36b72d5..00000000 --- a/micropsi_core/nodenet/vizapi.py +++ /dev/null @@ -1,161 +0,0 @@ -import os -import numpy as np - -import matplotlib -import platform - -# we need special backends to work around the default behaviour -# expecting the main-thread to do 
gui-stuff, since we're -# (a) a multithreaded webserver, and -# (b) plot from the runner-thread as well as the frontend -# find os: http://stackoverflow.com/q/1854 -# find supported backends: http://stackoverflow.com/a/13731150 -# if tested, include here: -# if platform.system() == "Darwin": -# matplotlib.use('macosx') -# else: -matplotlib.use('agg') - -import matplotlib.pyplot as plt -import matplotlib.gridspec as gridspec - -from io import BytesIO -import base64 - - -class NodenetPlot(object): - """ A NodenetPlot object represents an image, that can hold various plots - in a grid-layout. You can specify the size of the image, and the layout in - rows and cols. - Then, you can add plots to the image, which will be filled into the gridlayout line by line. - If the image is complete, you can either retrieve a base64-encoded string-representation of the - image, that can be delivered to the client, or save the generated image to a file - e.g.: - >>> image = NodenetPlot(cols=2) - >>> image.add_activation_plot(netapi.get_activations(ns1, group1)) - >>> image.add_linkweights_plot(netapi.get_link_weights(ns1, group1, ns2, group2)) - >>> image.save_to_file('/tmp/plot.png') - """ - - def __init__(self, plotsize=(6.0, 6.0), rows=1, cols=1, wspace=0.1, hspace=0.1): - """ Creates a new empty figure. - The figure can contain a variable number of plots, that are specified via - the rows and cols parameters. 
- Parameters: - plotsize - A tuple indicating the (x, y) size of the Image, defaults to (6, 6) - rows - the number of rows of plots, defaults to 1 - cols - the number of cols of plots, defaults to 1 - wspace - vertical spacing between plots, defaults to 0.1 - hspace - horizontal spacing between plots, defaults to 0.1 - """ - plt.close() # attempt to close old instance - self.figure = plt.figure(figsize=plotsize) - self.plotindex = 0 - self.rows = rows - self.cols = cols - self.grid = gridspec.GridSpec(rows, cols, wspace=wspace, hspace=hspace) - - def add_activation_plot(self, activations, rows=-1, cols=-1, vmin=None, vmax=None): - """ Adds a plot of node-activations to the figure. - Per default, the plot will attempt to render the activations into a square image - If you have non-quadratic data, you have to give numbers for rows and cols so that the - numbers can be reshaped accordingly - Parameters: - activations - array of activations - rows - number of rows, defaults to sqrt() - cols - number of cols, defaults to sqrt() - vmin - minimal value, defaults to 0 - vmax - maximal value, defaults to 1 - """ - data = np.array(activations) - if rows > 0 or cols > 0: - matrix = data.reshape((rows, cols)) - else: - sz = int(np.ceil(np.sqrt(data.shape[0]))) - matrix = data.reshape((sz, sz)) - - self.add_2d_matrix_plot(matrix, vmin=vmin, vmax=vmax) - - def add_linkweights_plot(self, linkweights, wspace=0.1, hspace=0.1, rows_outer=0, cols_outer=0, rows_inner=0, cols_inner=0): - """ Adds a plot of linkweights to the figure. 
- Parameters: - linkweights - output of netapi.get_link_weights - wspace - vertical spacing, defaults to 0.1 - hspace - horizontal spacing, defaults to 0.1 - rows_outer - number of rows of linkweight-plots, defaults to sqrt() - cols_outer - number of cols of linkweight-plots, defaults to sqrt() - rows_inner - number of pixel-rows per linkweight-plot, defaults to sqrt() - cols_inner - number of pixel-cols per linkweight-plot, defaults to sqrt() - """ - data = np.array(linkweights) - (r, c) = data.shape - outer_sqrt = int(np.ceil(np.sqrt(r))) - inner_sqrt = int(np.ceil(np.sqrt(c))) - matrix = data.reshape(( - rows_outer or outer_sqrt, - cols_outer or outer_sqrt, - rows_inner or inner_sqrt, - cols_inner or inner_sqrt - )) - self.add_4d_matrix_plot(matrix, wspace=wspace, hspace=hspace) - - def add_2d_matrix_plot(self, matrix, vmin=None, vmax=None): - """ General plotter function to add a two-dimensional plot. The shape - of the passed matrix determins the layout in rows and cols of the - plot - Parameters: - data - 2-dimensional numpy matrix - vmin - minimal value - vmax - maximal value - """ - ax = plt.Subplot(self.figure, self.grid[self.plotindex]) - ax.set_xticks([]) - ax.set_yticks([]) - ax.imshow(matrix, cmap=matplotlib.cm.gray, vmin=vmin, vmax=vmax) - self.figure.add_subplot(ax) - self.plotindex += 1 - - def add_4d_matrix_plot(self, data, wspace=0, hspace=0, vmin=None, vmax=None): - """ General plotter function to add a grid of several two-dimensional plots - The shape of the passed matrix determins the layout in rows and cols of the - plot - Parameters: - data - 4-dimensional numpy matrix - wspace - vertical spacing - hspace - horizontal spacing - vmin - minimal value - vmax - maximal value - """ - # compute rows & cols - (row, col, inner_row, inner_col) = data.shape - grid = gridspec.GridSpecFromSubplotSpec(row, col, subplot_spec=self.grid[self.plotindex], wspace=wspace, hspace=hspace) - for r in range(row): - row_data = data[r, :] - for c in range(col): - ax = 
plt.Subplot(self.figure, grid[(r * col + c)]) - ax.set_xticks([]) - ax.set_yticks([]) - ax.imshow(row_data[c, :], cmap=matplotlib.cm.gray, vmin=vmin, vmax=vmax) - self.figure.add_subplot(ax) - self.plotindex += 1 - - def save_to_file(self, filename, format="png", **params): - """ saves the generated figure to the given file - Parameters: - filename - the target filename. expects absolute paths, or saves to toolkit-root - format - the file-format. defaults to png - takes additional keyword-arguments for savefig, see http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.savefig - """ - filepath = os.path.abspath(filename) - self.figure.savefig(filepath, format=format, **params) - return filepath - - def to_base64(self, format="png", **params): - """ returns the base64 encoded bytestring of the generated figure - Parameters: - format - the file-format. defaults to png - takes additional keyword-arguments for savefig, see http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.savefig - """ - bio = BytesIO() - self.figure.savefig(bio, format=format, **params) - return base64.encodebytes(bio.getvalue()).decode("utf-8") diff --git a/micropsi_core/runtime.py b/micropsi_core/runtime.py index 664e0212..ed4a42e2 100755 --- a/micropsi_core/runtime.py +++ b/micropsi_core/runtime.py @@ -6,68 +6,76 @@ maintains a set of users, worlds (up to one per user), and nodenets, and provides an interface to external clients """ -from micropsi_core._runtime_api_world import * -from micropsi_core._runtime_api_monitors import * -import re - __author__ = 'joscha' __date__ = '10.05.12' -from configuration import config as cfg - -from micropsi_core.nodenet import node_alignment -from micropsi_core import config -from micropsi_core.tools import Bunch - +import re import os import sys -from micropsi_core import tools +import time import json +import signal +import logging +import zipfile import threading + +from code import InteractiveConsole from datetime import datetime, timedelta 
-import time -import signal -import logging +from micropsi_core.config import ConfigurationManager -from .micropsi_logger import MicropsiLogger +from micropsi_core._runtime_api_world import * +from micropsi_core._runtime_api_monitors import * +from micropsi_core.nodenet import node_alignment +from micropsi_core.micropsi_logger import MicropsiLogger +from micropsi_core.tools import Bunch, post_mortem, generate_uid NODENET_DIRECTORY = "nodenets" WORLD_DIRECTORY = "worlds" -signal_handler_registry = [] -runner = {'timestep': 1000, 'runner': None, 'factor': 1} +runner = {'timestep': 1000, 'runner': None, 'infguard': True} nodenet_lock = threading.Lock() # global variables set by intialize() RESOURCE_PATH = None PERSISTENCY_PATH = None +WORLD_PATH = None +AUTOSAVE_PATH = None -configs = None +runtime_config = None +runner_config = None logger = None worlds = {} +world_data = {} nodenets = {} +nodenet_data = {} + native_modules = {} custom_recipes = {} custom_operations = {} +world_classes = {} +worldadapter_classes = {} +worldobject_classes = {} netapi_consoles = {} -from code import InteractiveConsole +initialized = False + +auto_save_intervals = None class FileCacher(): - "Cache the stdout text so we can analyze it before returning it" + """Cache the stdout text so we can analyze it before returning it""" def __init__(self): self.reset() def reset(self): self.out = [] - def write(self,line): + def write(self, line): self.out.append(line) def flush(self): @@ -77,7 +85,7 @@ def flush(self): class NetapiShell(InteractiveConsole): - "Wrapper around Python that can filter input/output to the shell" + """Wrapper around Python that can filter input/output to the shell""" def __init__(self, netapi): self.stdout = sys.stdout self.stderr = sys.stderr @@ -94,41 +102,45 @@ def return_output(self): sys.stdout = self.stdout sys.stderr = self.stderr - def push(self,line): + def push(self, line): self.get_output() - incomplete = InteractiveConsole.push(self,line) + incomplete = 
InteractiveConsole.push(self, line) if incomplete: - InteractiveConsole.push(self,'\n') + InteractiveConsole.push(self, '\n') self.return_output() err = self.errcache.flush() if err and err.startswith('Traceback'): parts = err.strip().split('\n') - return False, "%s: %s" % (parts[-3], parts[-1]) + if len(parts) > 10: + if ":" in parts[10]: + return False, parts[10] + else: + return False, "%s: %s" % (parts[10], parts[12]) + else: + return False, err out = self.outcache.flush() return True, out.strip() -def add_signal_handler(handler): - signal_handler_registry.append(handler) - - def signal_handler(signal, frame): logging.getLogger('system').info("Shutting down") - for handler in signal_handler_registry: - handler(signal, frame) + kill_runners() + for uid in worlds: + worlds[uid].signal_handler(signal, frame) sys.exit(0) class MicropsiRunner(threading.Thread): - sum_of_durations = 0 + sum_of_calc_durations = 0 + sum_of_step_durations = 0 number_of_samples = 0 total_steps = 0 granularity = 10 def __init__(self): threading.Thread.__init__(self) - if cfg['micropsi2'].get('profile_runner'): + if runtime_config['micropsi2'].get('profile_runner'): import cProfile self.profiler = cProfile.Profile() else: @@ -144,72 +156,121 @@ def run(self): if self.paused: self.state.wait() - if configs['runner_timestep'] > 1000: - step = timedelta(seconds=configs['runner_timestep'] / 1000) + if runner_config['runner_timestep'] > 1000: + step = timedelta(seconds=runner_config['runner_timestep'] / 1000) else: - step = timedelta(milliseconds=configs['runner_timestep']) + step = timedelta(milliseconds=runner_config['runner_timestep']) start = datetime.now() log = False - uids = list(nodenets.keys()) + uids = [uid for uid in nodenets if nodenets[uid].is_active] + nodenets_to_save = [] + if self.profiler: + self.profiler.enable() for uid in uids: if uid in nodenets: nodenet = nodenets[uid] if nodenet.is_active: if nodenet.check_stop_runner_condition(): - nodenet.is_active = False + 
stop_nodenetrunner(uid) + # nodenet.is_active = False continue log = True try: - if self.profiler: - self.profiler.enable() - nodenet.timed_step() - if self.profiler: - self.profiler.disable() - nodenet.update_monitors() + nodenet.timed_step(runner_config.data) + nodenet.update_monitors_and_recorders() except: - if self.profiler: - self.profiler.disable() - nodenet.is_active = False - logging.getLogger("agent.%s" % uid).error("Exception in NodenetRunner:", exc_info=1) + stop_nodenetrunner(uid) + # nodenet.is_active = False + logging.getLogger("agent.%s" % uid).error("Exception in Agent:", exc_info=1) + post_mortem() MicropsiRunner.last_nodenet_exception[uid] = sys.exc_info() - if nodenet.world and nodenet.current_step % runner['factor'] == 0: - try: - worlds[nodenet.world].step() - except: - nodenet.is_active = False - logging.getLogger("world").error("Exception in WorldRunner:", exc_info=1) - MicropsiRunner.last_world_exception[nodenets[uid].world] = sys.exc_info() - - elapsed = datetime.now() - start + + if auto_save_intervals is not None: + for val in auto_save_intervals: + if nodenet.current_step % val == 0: + nodenets_to_save.append((nodenet.uid, val)) + break + + if self.profiler: + self.profiler.disable() + + for uid, interval in nodenets_to_save: + if uid in nodenets: + try: + net = nodenets[uid] + savefile = os.path.join(AUTOSAVE_PATH, "%s_%d.zip" % (uid, interval)) + logging.getLogger("system").info("Auto-saving nodenet %s at step %d (interval %d)" % (uid, net.current_step, interval)) + zipobj = zipfile.ZipFile(savefile, 'w', zipfile.ZIP_STORED) + net.save(zipfile=zipobj) + zipobj.close() + except Exception as err: + logging.getLogger("system").error("Auto-save failure for nodenet %s: %s: %s" % (uid, type(err).__name__, str(err))) + + calc_time = datetime.now() - start + if step.total_seconds() > 0: + left = step - calc_time + if left.total_seconds() > 0: + time.sleep(left.total_seconds()) + elif left.total_seconds() < 0: + 
logging.getLogger("system").warning("Overlong step %d took %.4f secs, allowed are %.4f secs!" % + (self.total_steps, calc_time.total_seconds(), step.total_seconds())) + + if self.profiler: + self.profiler.enable() + for wuid, world in worlds.items(): + if world.is_active: + uids.append(wuid) + try: + world.step() + except: + for uid in nodenets: + if nodenets[uid].world == wuid and nodenets[uid].is_active: + stop_nodenetrunner(uid) + logging.getLogger("world").error("Exception in Environment:", exc_info=1) + MicropsiRunner.last_world_exception[nodenets[uid].world] = sys.exc_info() + post_mortem() + if self.profiler: + self.profiler.disable() + if log: - ms = elapsed.seconds + ((elapsed.microseconds // 1000) / 1000) - self.sum_of_durations += ms + step_time = datetime.now() - start + calc_ms = calc_time.seconds + ((calc_time.microseconds // 1000) / 1000) + step_ms = step_time.seconds + ((step_time.microseconds // 1000) / 1000) + self.sum_of_calc_durations += calc_ms + self.sum_of_step_durations += step_ms self.number_of_samples += 1 self.total_steps += 1 - average_duration = self.sum_of_durations / self.number_of_samples + if self.total_steps % (self.granularity/10) == 0: + average_step_duration = self.sum_of_step_durations / self.number_of_samples + if average_step_duration > 0: + nodenet.frequency = round((1 / average_step_duration) * 1000) + else: + nodenet.frequency = 0 + if self.total_steps % self.granularity == 0: + average_calc_duration = self.sum_of_calc_durations / self.number_of_samples if self.profiler: import pstats import io s = io.StringIO() sortby = 'cumtime' ps = pstats.Stats(self.profiler, stream=s).sort_stats(sortby) - ps.print_stats('nodenet') + ps.print_stats('micropsi_') logging.getLogger("system").debug(s.getvalue()) - logging.getLogger("system").debug("Step %d: Avg. %.8f sec" % (self.total_steps, average_duration)) - self.sum_of_durations = 0 + logging.getLogger("system").debug("Step %d: Avg. 
%.8f sec" % (self.total_steps, average_calc_duration)) + self.sum_of_calc_durations = 0 + self.sum_of_step_durations = 0 self.number_of_samples = 0 - if average_duration < 0.0001: + if average_calc_duration < 0.0001: self.granularity = 10000 - elif average_duration < 0.001: + elif average_calc_duration < 0.001: self.granularity = 1000 else: self.granularity = 100 - left = step - elapsed - if left.total_seconds() > 0: - time.sleep(left.total_seconds()) + if len(uids) == 0: + self.pause() def resume(self): with self.state: @@ -226,21 +287,15 @@ def pause(self): def kill_runners(signal=None, frame=None): - for uid in worlds: - if hasattr(worlds[uid], 'kill_minecraft_thread'): - worlds[uid].kill_minecraft_thread() + for uid in nodenets: + if nodenets[uid].is_active: + stop_nodenetrunner(uid) + # nodenets[uid].is_active = False runner['runner'].resume() runner['running'] = False runner['runner'].join() -def _get_world_uid_for_nodenet_uid(nodenet_uid): - """ get the world uid to a given nodenet uid.""" - if nodenet_uid in nodenet_data: - return nodenet_data[nodenet_uid].world - return None - - # MicroPsi API @@ -248,7 +303,7 @@ def _get_world_uid_for_nodenet_uid(nodenet_uid): def set_logging_levels(logging_levels): for key in logging_levels: if key == 'agent': - cfg['logging']['level_agent'] = logging_levels[key] + runtime_config['logging']['level_agent'] = logging_levels[key] else: logger.set_logging_level(key, logging_levels[key]) return True @@ -262,9 +317,9 @@ def get_logger_messages(loggers=[], after=0): return logger.get_logs(loggers, after) -def get_monitoring_info(nodenet_uid, logger=[], after=0, monitor_from=0, monitor_count=-1): +def get_monitoring_info(nodenet_uid, logger=[], after=0, monitor_from=0, monitor_count=-1, with_recorders=False): """ Returns log-messages and monitor-data for the given nodenet.""" - data = get_monitor_data(nodenet_uid, 0, monitor_from, monitor_count) + data = get_monitor_data(nodenet_uid, 0, monitor_from, monitor_count, 
with_recorders=with_recorders) data['logs'] = get_logger_messages(logger, after) return data @@ -272,11 +327,20 @@ def get_monitoring_info(nodenet_uid, logger=[], after=0, monitor_from=0, monitor def get_logging_levels(nodenet_uid=None): levels = {} for key in logging.Logger.manager.loggerDict: - levels[key] = logging.getLevelName(logging.getLogger(key).getEffectiveLevel()) - levels['agent'] = cfg['logging']['level_agent'] + if key.startswith('agent') or key in ['world', 'system']: + levels[key] = logging.getLevelName(logging.getLogger(key).getEffectiveLevel()) + if 'agent' not in levels: + levels['agent'] = runtime_config['logging']['level_agent'] return levels +def benchmark_info(): + from micropsi_core.benchmark_system import benchmark_system + benchmarks = {} + benchmarks["benchmark"] = benchmark_system() + return benchmarks + + # Nodenet def get_available_nodenets(owner=None): """Returns a dict of uids: Nodenet of available (running and stored) nodenets. @@ -299,10 +363,19 @@ def get_nodenet(nodenet_uid): if nodenet_uid in get_available_nodenets(): load_nodenet(nodenet_uid) else: - raise KeyError("Unknown nodenet") + return None return nodenets[nodenet_uid] +def get_nodenet_uid_by_name(name): + """ Returns the uid of the nodenet with the given name or None if no nodenet was found""" + for uid in nodenet_data: + if nodenet_data[uid]['name'] == name: + return uid + else: + return None + + def load_nodenet(nodenet_uid): """ Load the nodenet with the given uid into memeory Arguments: @@ -317,7 +390,7 @@ def load_nodenet(nodenet_uid): with nodenet_lock: - if cfg['micropsi2'].get('single_agent_mode'): + if runtime_config['micropsi2'].get('single_agent_mode'): # unload all other nodenets if single_agent_mode is selected for uid in list(nodenets.keys()): if uid != nodenet_uid: @@ -328,34 +401,38 @@ def load_nodenet(nodenet_uid): worldadapter_instance = None if hasattr(data, 'world') and data.world: + load_world(data.world) if data.world in worlds: world_uid = 
data.world worldadapter = data.get('worldadapter') else: - logging.getLogger("system").warn("World %s for nodenet %s not found" % (data.world, data.uid)) + logging.getLogger("system").warning("Environment %s for agent %s not found" % (data.world, data.uid)) if world_uid: - result, worldadapter_instance = worlds[world_uid].register_nodenet(worldadapter, nodenet_uid) + result, worldadapter_instance = worlds[world_uid].register_nodenet(worldadapter, nodenet_uid, nodenet_name=data['name'], config=data.get('worldadapter_config', {})) if not result: - logging.getLogger('system').warn(worldadapter_instance) + logging.getLogger('system').warning(worldadapter_instance) worldadapter_instance = None worldadapter = None world_uid = None engine = data.get('engine') or 'dict_engine' - logger.register_logger("agent.%s" % nodenet_uid, cfg['logging']['level_agent']) + logger.register_logger("agent.%s" % nodenet_uid, runtime_config['logging']['level_agent']) params = { + 'persistency_path': os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY, data.uid), 'name': data.name, 'worldadapter': worldadapter, 'worldadapter_instance': worldadapter_instance, 'world': world_uid, 'owner': data.owner, 'uid': data.uid, - 'native_modules': filter_native_modules(engine), + 'native_modules': native_modules, 'use_modulators': data.get('use_modulators', True) # getter for compatibility } + if hasattr(data, 'version'): + params['version'] = data.version if engine == 'dict_engine': from micropsi_core.nodenet.dict_engine.dict_nodenet import DictNodenet nodenets[nodenet_uid] = DictNodenet(**params) @@ -364,9 +441,9 @@ def load_nodenet(nodenet_uid): nodenets[nodenet_uid] = TheanoNodenet(**params) # Add additional engine types here else: - return False, "Nodenet %s requires unknown engine %s" % (nodenet_uid, engine) + return False, "Agent %s requires unknown engine %s" % (nodenet_uid, engine) - nodenets[nodenet_uid].load(os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY, nodenet_uid + ".json")) + 
nodenets[nodenet_uid].load() netapi_consoles[nodenet_uid] = NetapiShell(nodenets[nodenet_uid].netapi) @@ -379,21 +456,27 @@ def load_nodenet(nodenet_uid): worldadapter = nodenets[nodenet_uid].worldadapter return True, nodenet_uid - return False, "Nodenet %s not found in %s" % (nodenet_uid, PERSISTENCY_PATH) + return False, "Agent %s not found in %s" % (nodenet_uid, PERSISTENCY_PATH) def get_nodenet_metadata(nodenet_uid): """ returns the given nodenet's metadata""" nodenet = get_nodenet(nodenet_uid) + if nodenet is None: + return False, "Unknown nodenet" data = nodenet.metadata data.update({ 'nodetypes': nodenet.get_standard_nodetype_definitions(), 'nodespaces': nodenet.construct_nodespaces_dict(None, transitive=True), - 'native_modules': filter_native_modules(nodenet.engine), - 'monitors': nodenet.construct_monitors_dict(), - 'rootnodespace': nodenet.get_nodespace(None).uid + 'native_modules': nodenet.get_native_module_definitions(), + 'flow_modules': nodenet.get_flow_module_definitions(), + 'monitors': nodenet.construct_monitors_dict(with_values=False), + 'rootnodespace': nodenet.get_nodespace(None).uid, + 'resource_path': RESOURCE_PATH }) - return data + if nodenet.world: + data['current_world_step'] = worlds[nodenet.world].current_step + return True, data def get_nodenet_activation_data(nodenet_uid, nodespaces=[], last_call_step=-1): @@ -406,18 +489,26 @@ def get_nodenet_activation_data(nodenet_uid, nodespaces=[], last_call_step=-1): return data -def get_nodes(nodenet_uid, nodespaces=[], include_links=True): +def get_nodes(nodenet_uid, nodespaces=[], include_links=True, links_to_nodespaces=[]): """Return data for the given nodespaces""" nodenet = get_nodenet(nodenet_uid) - return nodenet.get_nodes(nodespaces, include_links) + return nodenet.get_nodes(nodespaces, include_links, links_to_nodespaces=links_to_nodespaces) -def get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=None, monitors=None, dashboard=None): +def 
get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=None, monitors=None, dashboard=None, recorders=None): """ returns the current state of the calculation """ data = {} nodenet_obj = get_nodenet(nodenet_uid) if nodenet_obj is not None: + if nodenet_uid in MicropsiRunner.last_nodenet_exception: + t, err, tb = MicropsiRunner.last_nodenet_exception[nodenet_uid] + del MicropsiRunner.last_nodenet_exception[nodenet_uid] + raise err + if nodenet_obj.world is not None and nodenet_obj.world in MicropsiRunner.last_world_exception: + t, err, tb = MicropsiRunner.last_world_exception[nodenet_obj.world] + del MicropsiRunner.last_world_exception[nodenet_obj.world] + raise err condition = nodenet_obj.get_runner_condition() if condition: data['calculation_condition'] = condition @@ -427,13 +518,14 @@ def get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=No data['calculation_condition']['monitor']['color'] = monitor.color else: del data['calculation_condition']['monitor'] - data['calculation_running'] = nodenet_obj.is_active + data['calculation_running'] = nodenet_obj.is_active or (nodenet_obj.world and worlds[nodenet_obj.world].is_active) data['current_nodenet_step'] = nodenet_obj.current_step data['current_world_step'] = worlds[nodenet_obj.world].current_step if nodenet_obj.world else 0 + data['control_frequency'] = nodenet_obj.frequency if nodenet is not None: if not type(nodenet) == dict: nodenet = {} - data['nodenet'] = get_nodes(nodenet_uid, nodespaces=nodenet.get('nodespaces', []), include_links=nodenet.get('include_links', True)) + data['nodenet'] = get_nodes(nodenet_uid, nodespaces=nodenet.get('nodespaces', []), include_links=nodenet.get('include_links', True), links_to_nodespaces=nodenet.get('links_to_nodespaces', [])) if nodenet_diff is not None: activations = get_nodenet_activation_data(nodenet_uid, last_call_step=nodenet_diff['step'], nodespaces=nodenet_diff.get('nodespaces', [])) data['nodenet_diff'] = { @@ -441,10 +533,10 
@@ def get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=No 'modulators': nodenet_obj.construct_modulators_dict() } if activations['has_changes']: - data['nodenet_diff']['changes'] = nodenet_obj.get_nodespace_changes(nodenet_diff.get('nodespaces', []), nodenet_diff['step']) - if nodenet_obj.user_prompt: - data['user_prompt'] = nodenet_obj.user_prompt - nodenet_obj.user_prompt = None + data['nodenet_diff']['changes'] = nodenet_obj.get_nodespace_changes(nodenet_diff.get('nodespaces', []), nodenet_diff['step'], include_links=nodenet_diff.get('include_links', True)) + prompt = nodenet_obj.consume_user_prompt() + if prompt: + data['user_prompt'] = prompt if world is not None and nodenet_obj.world: if not type(world) == dict: world = {} @@ -455,9 +547,11 @@ def get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=No data['monitors'] = get_monitoring_info(nodenet_uid=nodenet_uid, **monitors) if dashboard is not None: data['dashboard'] = get_agent_dashboard(nodenet_uid) + if recorders is not None: + data['recorders'] = nodenet_obj.construct_recorders_dict() return True, data else: - return False, "No such nodenet" + return False, "No such agent" def unload_nodenet(nodenet_uid): @@ -471,7 +565,9 @@ def unload_nodenet(nodenet_uid): return False if nodenet_uid in netapi_consoles: del netapi_consoles[nodenet_uid] + stop_nodenetrunner(nodenet_uid) nodenet = nodenets[nodenet_uid] + nodenet.close_figures() if nodenet.world: worlds[nodenet.world].unregister_nodenet(nodenet.uid) del nodenets[nodenet_uid] @@ -479,7 +575,7 @@ def unload_nodenet(nodenet_uid): return True -def new_nodenet(nodenet_name, engine="dict_engine", worldadapter=None, template=None, owner="", world_uid=None, uid=None, use_modulators=True): +def new_nodenet(nodenet_name, engine="dict_engine", worldadapter=None, template=None, owner="admin", world_uid=None, use_modulators=True, worldadapter_config={}): """Creates a new node net manager and registers it. 
Arguments: @@ -487,29 +583,25 @@ def new_nodenet(nodenet_name, engine="dict_engine", worldadapter=None, template= gate types supported for directional activation spreading of this nodenet, and the initial node types owner (optional): the creator of this nodenet world_uid (optional): if submitted, attempts to bind the nodenet to this world - uid (optional): if submitted, this is used as the UID for the nodenet (normally, this is generated) Returns nodenet_uid if successful, None if failure """ - if not uid: - uid = tools.generate_uid() + uid = generate_uid() data = dict( - version=1, step=0, uid=uid, name=nodenet_name, - worldadapter=worldadapter, owner=owner, - world=world_uid, settings={}, engine=engine, - use_modulators=use_modulators) + use_modulators=use_modulators, + worldadapter_config=worldadapter_config) - filename = os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY, data['uid'] + ".json") nodenet_data[data['uid']] = Bunch(**data) + load_nodenet(data['uid']) if template is not None and template in nodenet_data: load_nodenet(template) @@ -518,7 +610,9 @@ def new_nodenet(nodenet_name, engine="dict_engine", worldadapter=None, template= load_nodenet(uid) nodenets[uid].merge_data(data_to_merge) - nodenets[uid].save(filename) + if world_uid and worldadapter: + set_nodenet_properties(uid, worldadapter=worldadapter, world_uid=world_uid, worldadapter_config=worldadapter_config) + save_nodenet(uid) return True, data['uid'] @@ -527,28 +621,33 @@ def delete_nodenet(nodenet_uid): Simple unloading is maintained automatically when a nodenet is suspended and another one is accessed. 
""" - filename = os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY, nodenet_uid + '.json') - nodenet = get_nodenet(nodenet_uid) - nodenet.remove(filename) - unload_nodenet(nodenet_uid) + import shutil + if nodenet_uid in nodenets: + unload_nodenet(nodenet_uid) del nodenet_data[nodenet_uid] + nodenet_directory = os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY, nodenet_uid) + shutil.rmtree(nodenet_directory) return True -def set_nodenet_properties(nodenet_uid, nodenet_name=None, worldadapter=None, world_uid=None, owner=None): +def set_nodenet_properties(nodenet_uid, nodenet_name=None, worldadapter=None, world_uid=None, owner=None, worldadapter_config={}): """Sets the supplied parameters (and only those) for the nodenet with the given uid.""" nodenet = get_nodenet(nodenet_uid) + if world_uid == '': + world_uid = None if nodenet.world and (nodenet.world != world_uid or nodenet.worldadapter != worldadapter): worlds[nodenet.world].unregister_nodenet(nodenet.uid) nodenet.world = None + nodenet.worldadapter_instance = None if worldadapter is None: worldadapter = nodenet.worldadapter if world_uid is not None and worldadapter is not None: - assert worldadapter in worlds[world_uid].supported_worldadapters + world_obj = load_world(world_uid) + assert worldadapter in world_obj.supported_worldadapters nodenet.world = world_uid nodenet.worldadapter = worldadapter - result, wa_instance = worlds[world_uid].register_nodenet(worldadapter, nodenet.uid) + result, wa_instance = world_obj.register_nodenet(worldadapter, nodenet.uid, nodenet_name=nodenet.name, config=worldadapter_config) if result: nodenet.worldadapter_instance = wa_instance if nodenet_name: @@ -560,23 +659,27 @@ def set_nodenet_properties(nodenet_uid, nodenet_name=None, worldadapter=None, wo def start_nodenetrunner(nodenet_uid): """Starts a thread that regularly advances the given nodenet by one step.""" - - nodenets[nodenet_uid].is_active = True + nodenet = get_nodenet(nodenet_uid) + nodenet.simulation_started() + # 
nodenets[nodenet_uid].is_active = True + if nodenet.world: + worlds[nodenet.world].is_active = True + worlds[nodenet.world].simulation_started() if runner['runner'].paused: runner['runner'].resume() return True -def set_runner_properties(timestep, factor): +def set_runner_properties(timestep, infguard=False): """Sets the speed of the nodenet calculation in ms. Argument: timestep: sets the calculation speed. """ - configs['runner_timestep'] = timestep + runner_config['runner_timestep'] = timestep + runner_config['runner_infguard'] = bool(infguard) runner['timestep'] = timestep - configs['runner_factor'] = int(factor) - runner['factor'] = int(factor) + runner['infguard'] = bool(infguard) return True @@ -584,9 +687,13 @@ def set_runner_condition(nodenet_uid, monitor=None, steps=None): """ registers a condition that stops the runner if it is fulfilled""" nodenet = get_nodenet(nodenet_uid) condition = {} - if monitor is not None: - condition['monitor'] = monitor - if steps is not None: + if monitor: + if type(monitor) == dict and 'uid' in monitor and 'value' in monitor: + condition['monitor'] = monitor + else: + return False, "Monitor condition expects a dict with keys 'uid' and 'value'" + if steps: + steps = int(steps) condition['step'] = nodenet.current_step + steps condition['step_amount'] = steps if condition: @@ -602,8 +709,8 @@ def remove_runner_condition(nodenet_uid): def get_runner_properties(): """Returns the speed that has been configured for the nodenet runner (in ms).""" return { - 'timestep': configs['runner_timestep'], - 'factor': configs['runner_factor'] + 'timestep': runner_config['runner_timestep'], + 'infguard': runner_config['runner_infguard'] } @@ -615,13 +722,15 @@ def get_is_nodenet_running(nodenet_uid): def stop_nodenetrunner(nodenet_uid): """Stops the thread for the given nodenet.""" nodenet = get_nodenet(nodenet_uid) - nodenet.is_active = False + nodenet.simulation_stopped() test = {nodenets[uid].is_active for uid in nodenets} + if 
nodenet.world: + test_world = {nodenets[uid].is_active and nodenets[uid].world == nodenet.world for uid in nodenets} + if True not in test_world: + worlds[nodenet.world].is_active = False + worlds[nodenet.world].simulation_stopped() if True not in test: - test = {worlds[uid].is_active for uid in worlds} - if True not in test: - runner['runner'].pause() - + runner['runner'].pause() return True @@ -632,10 +741,58 @@ def step_nodenet(nodenet_uid): nodenet_uid: The uid of the nodenet """ nodenet = get_nodenet(nodenet_uid) - nodenet.timed_step() - nodenet.update_monitors() - if nodenet.world and nodenet.current_step % configs['runner_factor'] == 0: + if nodenet.is_active: + nodenet.is_active = False + + if runtime_config['micropsi2'].get('profile_runner'): + import cProfile + profiler = cProfile.Profile() + profiler.enable() + + if nodenet.world: + if type(worlds[nodenet.world]).is_realtime and not worlds[nodenet.world].is_active: + if runner['runner'].paused: + runner['runner'].resume() + worlds[nodenet.world].simulation_started() + + nodenet.timed_step(runner_config.data) + + if runtime_config['micropsi2'].get('profile_runner'): + profiler.disable() + import pstats + import io + s = io.StringIO() + sortby = 'cumtime' + ps = pstats.Stats(profiler, stream=s).sort_stats(sortby) + ps.print_stats('micropsi_') + logging.getLogger("agent.%s" % nodenet_uid).debug(s.getvalue()) + + if nodenet.world and not type(worlds[nodenet.world]).is_realtime: worlds[nodenet.world].step() + nodenet.update_monitors_and_recorders() + return nodenet.current_step + + +def single_step_nodenet_only(nodenet_uid): + nodenet = get_nodenet(nodenet_uid) + if runtime_config['micropsi2'].get('profile_runner'): + import cProfile + profiler = cProfile.Profile() + profiler.enable() + + nodenet.timed_step(runner_config.data) + + if runtime_config['micropsi2'].get('profile_runner'): + profiler.disable() + import pstats + import io + s = io.StringIO() + sortby = 'cumtime' + ps = pstats.Stats(profiler, 
stream=s).sort_stats(sortby) + ps.print_stats('micropsi_') + logging.getLogger("agent.%s" % nodenet_uid).debug(s.getvalue()) + + nodenet.update_monitors_and_recorders() return nodenet.current_step @@ -643,18 +800,22 @@ def step_nodenets_in_world(world_uid, nodenet_uid=None, steps=1): """ Advances all nodenets registered in the given world (or, only the given nodenet) by the given number of steps""" nodenet = None + if world_uid in worlds and not worlds[world_uid].is_active: + worlds[world_uid].simulation_started() + if runner['runner'].paused: + runner['runner'].resume() if nodenet_uid is not None: nodenet = get_nodenet(nodenet_uid) if nodenet and nodenet.world == world_uid: for i in range(steps): - nodenet.timed_step() - nodenet.update_monitors() + nodenet.timed_step(runner_config.data) + nodenet.update_monitors_and_recorders() else: for i in range(steps): for uid in worlds[world_uid].agents: nodenet = get_nodenet(uid) - nodenet.timed_step() - nodenet.update_monitors() + nodenet.timed_step(runner_config.data) + nodenet.update_monitors_and_recorders() return True @@ -668,10 +829,22 @@ def revert_nodenet(nodenet_uid, also_revert_world=False): return True +def reload_and_revert(nodenet_uid, also_revert_world=False): + """Returns the nodenet to the last saved state.""" + nodenet = get_nodenet(nodenet_uid) + world_uid = nodenet.world + unload_nodenet(nodenet_uid) + if world_uid: + unload_world(world_uid) + result = reload_code() + load_nodenet(nodenet_uid) + return result + + def save_nodenet(nodenet_uid): """Stores the nodenet on the server (but keeps it open).""" nodenet = get_nodenet(nodenet_uid) - nodenet.save(os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY, nodenet_uid + '.json')) + nodenet.save() nodenet_data[nodenet_uid] = Bunch(**nodenet.metadata) return True @@ -694,10 +867,10 @@ def import_nodenet(string, owner=None): global nodenet_data import_data = json.loads(string) if 'uid' not in import_data: - import_data['uid'] = tools.generate_uid() + 
import_data['uid'] = generate_uid() else: if import_data['uid'] in nodenets: - raise RuntimeError("A nodenet with this ID already exists.") + raise RuntimeError("An agent with this ID already exists.") if 'owner': import_data['owner'] = owner nodenet_uid = import_data['uid'] @@ -705,7 +878,7 @@ def import_nodenet(string, owner=None): meta = parse_definition(import_data, filename) nodenet_data[nodenet_uid] = meta # assert import_data['world'] in worlds - with open(filename, 'w+') as fp: + with open(filename, 'w+', encoding="utf-8") as fp: fp.write(json.dumps(meta)) load_nodenet(nodenet_uid) merge_nodenet(nodenet_uid, string, keep_uids=True) @@ -765,12 +938,10 @@ def get_node(nodenet_uid, node_uid, include_links=True): "type" (string): the type of this node, "parameters" (dict): a dictionary of the node parameters "activation" (float): the activation of this node, - "gate_parameters" (dict): a dictionary containing dicts of parameters for each gate of this node "name" (str): display name "gate_activations" (dict): a dictionary containing dicts of activations for each gate of this node - "gate_functions"(dict): a dictionary containing the name of the gatefunction for each gate of this node + "gate_configuration"(dict): a dictionary containing the name of the gatefunction and its parameters for each gate "position" (list): the x, y, z coordinates of this node, as a list - "sheaves" (dict): a dict of sheaf-activations for this node "parent_nodespace" (str): the uid of the nodespace this node lives in } """ @@ -785,7 +956,7 @@ def get_node(nodenet_uid, node_uid, include_links=True): return False, "Unknown UID" -def add_node(nodenet_uid, type, pos, nodespace=None, state=None, uid=None, name="", parameters=None): +def add_node(nodenet_uid, type, pos, nodespace=None, state=None, name="", parameters=None): """Creates a new node. (Including native module.) 
Arguments: @@ -802,22 +973,20 @@ def add_node(nodenet_uid, type, pos, nodespace=None, state=None, uid=None, name= None if failure. """ nodenet = get_nodenet(nodenet_uid) - uid = nodenet.create_node(type, nodespace, pos, name, uid=uid, parameters=parameters) + uid = nodenet.create_node(type, nodespace, pos, name, parameters=parameters) return True, uid -def add_nodespace(nodenet_uid, pos, nodespace=None, uid=None, name="", options=None): +def add_nodespace(nodenet_uid, nodespace=None, name="", options=None): """Creates a new nodespace Arguments: nodenet_uid: uid of the nodespace manager - position: position of the node in the current nodespace nodespace: uid of the parent nodespace - uid (optional): if not supplied, a uid will be generated name (optional): if not supplied, the uid will be used instead of a display name options (optional): a dict of options. TBD """ nodenet = get_nodenet(nodenet_uid) - uid = nodenet.create_nodespace(nodespace, pos, name=name, uid=uid, options=options) + uid = nodenet.create_nodespace(nodespace, name=name, options=options) return True, uid @@ -857,11 +1026,11 @@ def clone_nodes(nodenet_uid, node_uids, clonemode, nodespace=None, offset=[50, 5 for _, n in copynodes.items(): target_nodespace = nodespace if nodespace is not None else n.parent_nodespace - uid = nodenet.create_node(n.type, target_nodespace, [n.position[0] + offset[0], n.position[1] + offset[1], n.position[2] + offset[2]], name=n.name + '_copy', uid=None, parameters=n.clone_parameters().copy(), gate_parameters=n.get_gate_parameters()) + uid = nodenet.create_node(n.type, target_nodespace, [n.position[0] + offset[0], n.position[1] + offset[1], n.position[2] + offset[2]], name=n.name, uid=None, parameters=n.clone_parameters().copy(), gate_configuration=n.get_gate_configuration()) if uid: uidmap[n.uid] = uid else: - logger.warning('Could not clone node: ' + uid) + logging.getLogger("system").warning('Could not clone node: ' + uid) for uid, l in copylinks.items(): source_uid = 
uidmap.get(l.source_node.uid, l.source_node.uid) @@ -871,8 +1040,7 @@ def clone_nodes(nodenet_uid, node_uids, clonemode, nodespace=None, offset=[50, 5 l.source_gate.type, target_uid, l.target_slot.type, - l.weight, - l.certainty) + l.weight) for uid in uidmap.values(): result[uid] = nodenet.get_node(uid).get_data(include_links=True) @@ -880,7 +1048,7 @@ def clone_nodes(nodenet_uid, node_uids, clonemode, nodespace=None, offset=[50, 5 for uid in followupnodes: result[uid] = nodenet.get_node(uid).get_data(include_links=True) - if len(result.keys()) or len(nodes) == 0: + if len(result.keys()) or len(node_uids) == 0: return True, result else: return False, "Could not clone nodes. See log for details." @@ -909,38 +1077,43 @@ def __pythonify(name): def generate_netapi_fragment(nodenet_uid, node_uids): - lines = [] + lines = ["nodespace_uid = None"] idmap = {} nodenet = get_nodenet(nodenet_uid) nodes = [] - nodespaces = [] + #nodespaces = [] + #for node_uid in node_uids: + # if not nodenet.is_nodespace(node_uid): + # nodes.append(nodenet.get_node(node_uid)) + # else: + # nodespaces.append(nodenet.get_nodespace(node_uid)) + for node_uid in node_uids: - if not nodenet.is_nodespace(node_uid): - nodes.append(nodenet.get_node(node_uid)) - else: - nodespaces.append(nodenet.get_nodespace(node_uid)) + nodes.append(nodenet.get_node(node_uid)) xpos = [] ypos = [] zpos = [] nodes = sorted(nodes, key=lambda node: node.position[1] * 1000 + node.position[0]) - nodespaces = sorted(nodespaces, key=lambda node: node.position[1] * 1000 + node.position[0]) + #nodespaces = sorted(nodespaces, key=lambda node: node.position[1] * 1000 + node.position[0]) # nodespaces - for i, nodespace in enumerate(nodespaces): - name = nodespace.name.strip() if nodespace.name != nodespace.uid else None - varname = "nodespace%i" % i - if name: - pythonname = __pythonify(name) - if pythonname not in idmap.values(): - varname = pythonname - lines.append("%s = netapi.create_nodespace(None, \"%s\")" % (varname, 
name)) - else: - lines.append("%s = netapi.create_nodespace(None)" % (varname)) - idmap[nodespace.uid] = varname - xpos.append(nodespace.position[0]) - ypos.append(nodespace.position[1]) - zpos.append(nodespace.position[2]) + #for i, nodespace in enumerate(nodespaces): + # name = nodespace.name.strip() if nodespace.name != nodespace.uid else None + # varname = "nodespace%i" % i + # if name: + # pythonname = __pythonify(name) + # if pythonname not in idmap.values(): + # varname = pythonname + # lines.append("%s = netapi.create_nodespace(None, \"%s\")" % (varname, name)) + # else: + # lines.append("%s = netapi.create_nodespace(None)" % (varname)) + # idmap[nodespace.uid] = varname + # xpos.append(nodespace.position[0]) + # ypos.append(nodespace.position[1]) + # zpos.append(nodespace.position[2]) + + flow_nodetypes = nodenet.get_flow_module_definitions() # nodes and gates for i, node in enumerate(nodes): @@ -950,14 +1123,13 @@ def generate_netapi_fragment(nodenet_uid, node_uids): pythonname = __pythonify(name) if pythonname not in idmap.values(): varname = pythonname - lines.append("%s = netapi.create_node('%s', None, \"%s\")" % (varname, node.type, name)) + lines.append("%s = netapi.create_node('%s', nodespace_uid, \"%s\")" % (varname, node.type, name)) else: - lines.append("%s = netapi.create_node('%s', None)" % (varname, node.type)) + lines.append("%s = netapi.create_node('%s', nodespace_uid)" % (varname, node.type)) - ndgps = node.clone_non_default_gate_parameters() - for gatetype in ndgps.keys(): - for parameter, value in ndgps[gatetype].items(): - lines.append("%s.set_gate_parameter('%s', \"%s\", %.2f)" % (varname, gatetype, parameter, value)) + gate_config = node.get_gate_configuration() + for gatetype, gconfig in gate_config.items(): + lines.append("%s.set_gate_configuration('%s', \"%s\", %s)" % (varname, gatetype, gconfig['gatefunction'], str(gconfig.get('gatefunction_parameters', {})))) nps = node.clone_parameters() for parameter, value in nps.items(): @@ 
-966,9 +1138,11 @@ def generate_netapi_fragment(nodenet_uid, node_uids): if parameter not in node.nodetype.parameter_defaults or node.nodetype.parameter_defaults[parameter] != value: if isinstance(value, str): - lines.append("%s.set_parameter(\"%s\", \"%s\")" % (varname, parameter, value)) - else: + lines.append("%s.set_parameter(\"%s\", \"\"\"%s\"\"\")" % (varname, parameter, value)) + elif isinstance(value, (float, int)): lines.append("%s.set_parameter(\"%s\", %.2f)" % (varname, parameter, value)) + elif isinstance(value, list): + lines.append("%s.set_parameter(\"%s\", \"\"\"%s\"\"\")" % (varname, parameter, ','.join([str(v) for v in value]))) idmap[node.uid] = varname xpos.append(node.position[0]) @@ -979,6 +1153,15 @@ def generate_netapi_fragment(nodenet_uid, node_uids): # links for node in nodes: + if node.type in flow_nodetypes: + source_id = idmap[node.uid] + for name in node.outputmap: + for uid, target in node.outputmap[name]: + if uid not in idmap: + continue + target_id = idmap[uid] + lines.append("netapi.flow(%s, \"%s\", %s, \"%s\")" % (source_id, name, target_id, target)) + for gatetype in node.get_gate_types(): gate = node.get_gate(gatetype) for link in gate.get_links(): @@ -1041,7 +1224,7 @@ def generate_netapi_fragment(nodenet_uid, node_uids): origin = [100, 100, 0] factor = [int(min(xpos)), int(min(ypos)), int(min(zpos))] lines.append("origin_pos = (%d, %d, %d)" % (origin[0], origin[1], origin[2])) - for node in nodes + nodespaces: + for node in nodes: x = int(node.position[0] - factor[0]) y = int(node.position[1] - factor[1]) z = int(node.position[2] - factor[2]) @@ -1050,9 +1233,9 @@ def generate_netapi_fragment(nodenet_uid, node_uids): return "\n".join(lines) -def set_entity_positions(nodenet_uid, positions): +def set_node_positions(nodenet_uid, positions): """ Takes a dict with node_uids as keys and new positions for the nodes as values """ - get_nodenet(nodenet_uid).set_entity_positions(positions) + 
get_nodenet(nodenet_uid).set_node_positions(positions) return True @@ -1104,14 +1287,14 @@ def get_available_node_types(nodenet_uid): nodenet = get_nodenet(nodenet_uid) return { 'nodetypes': nodenet.get_standard_nodetype_definitions(), - 'native_modules': filter_native_modules(nodenet.engine) + 'native_modules': nodenet.get_native_module_definitions() } def get_available_native_module_types(nodenet_uid): """Returns a list of native modules. If an nodenet uid is supplied, filter for node types defined within this nodenet.""" - return filter_native_modules(get_nodenet(nodenet_uid).engine) + return get_nodenet(nodenet_uid).get_native_module_definitions() def set_node_parameters(nodenet_uid, node_uid, parameters): @@ -1122,33 +1305,17 @@ def set_node_parameters(nodenet_uid, node_uid, parameters): return True -def get_gatefunction(nodenet_uid, node_uid, gate_type): - """ - Returns the name of the gate function configured for that given node and gate - """ - return get_nodenet(nodenet_uid).get_node(node_uid).get_gatefunction_name(gate_type) - - -def set_gatefunction(nodenet_uid, node_uid, gate_type, gatefunction=None): - """ - Sets the gate function of the given node and gate. 
- """ - get_nodenet(nodenet_uid).get_node(node_uid).set_gatefunction_name(gate_type, gatefunction) - return True - - def get_available_gatefunctions(nodenet_uid): """ - Returns a list of names of the available gatefunctions + Returns a dict of the available gatefunctions and their parameters and parameter-defaults """ return get_nodenet(nodenet_uid).get_available_gatefunctions() -def set_gate_parameters(nodenet_uid, node_uid, gate_type, parameters): - """Sets the gate parameters of the given gate of the given node to the supplied dictionary.""" +def set_gate_configuration(nodenet_uid, node_uid, gate_type, gatefunction=None, gatefunction_parameters=None): + """Sets the configuration of the given gate of the given node to the supplied gatefunction and -parameters.""" nodenet = get_nodenet(nodenet_uid) - for key, value in parameters.items(): - nodenet.get_node(node_uid).set_gate_parameter(gate_type, key, value) + nodenet.get_node(node_uid).set_gate_configuration(gate_type, gatefunction, gatefunction_parameters) return True @@ -1177,16 +1344,16 @@ def bind_datasource_to_sensor(nodenet_uid, sensor_uid, datasource): return False -def bind_datatarget_to_actor(nodenet_uid, actor_uid, datatarget): - """Associates the datatarget type to the actor node with the given uid.""" - node = get_nodenet(nodenet_uid).get_node(actor_uid) - if node.type == "Actor": +def bind_datatarget_to_actuator(nodenet_uid, actuator_uid, datatarget): + """Associates the datatarget type to the actuator node with the given uid.""" + node = get_nodenet(nodenet_uid).get_node(actuator_uid) + if node.type == "Actuator": node.set_parameter('datatarget', datatarget) return True return False -def add_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): +def add_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): """Creates a new link. Arguments. 
@@ -1195,38 +1362,27 @@ def add_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type target_node_uid: uid of the target node slot_type: type of the target slot weight: the weight of the link (a float) - certainty (optional): a probabilistic parameter for the link """ nodenet = get_nodenet(nodenet_uid) with nodenet.netlock: - success = nodenet.create_link(source_node_uid, gate_type, target_node_uid, slot_type, weight, certainty) + success = nodenet.create_link(source_node_uid, gate_type, target_node_uid, slot_type, weight) uid = None if success: # todo: check whether clients need these uids uid = "%s:%s:%s:%s" % (source_node_uid, gate_type, slot_type, target_node_uid) return success, uid -def set_link_weight(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight=1, certainty=1): +def set_link_weight(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): """Set weight of the given link.""" - return get_nodenet(nodenet_uid).set_link_weight(source_node_uid, gate_type, target_node_uid, slot_type, weight, certainty) + return get_nodenet(nodenet_uid).set_link_weight(source_node_uid, gate_type, target_node_uid, slot_type, weight) def get_links_for_nodes(nodenet_uid, node_uids): """ Returns a list of links connected to the given nodes, and their connected nodes, if they are not in the same nodespace""" nodenet = get_nodenet(nodenet_uid) - source_nodes = [nodenet.get_node(uid) for uid in node_uids] - links = {} - nodes = {} - for node in source_nodes: - nodelinks = node.get_associated_links() - for l in nodelinks: - links[l.signature] = l.get_data(complete=True) - if l.source_node.parent_nodespace != node.parent_nodespace: - nodes[l.source_node.uid] = l.source_node.get_data(include_links=False) - if l.target_node.parent_nodespace != node.parent_nodespace: - nodes[l.target_node.uid] = l.target_node.get_data(include_links=False) - return {'links': list(links.values()), 'nodes': nodes} + links, nodes = 
nodenet.get_links_for_nodes(node_uids) + return {'links': links, 'nodes': nodes} def delete_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type): @@ -1240,12 +1396,13 @@ def align_nodes(nodenet_uid, nodespace): return result -def user_prompt_response(nodenet_uid, node_uid, values, resume_nodenet): +def user_prompt_response(nodenet_uid, node_uid, key, parameters, resume_nodenet): nodenet = get_nodenet(nodenet_uid) - for key, value in values.items(): - nodenet.get_node(node_uid).set_parameter(key, value) - nodenet.is_active = resume_nodenet - nodenet.user_prompt = None + if key and parameters: + nodenet.set_user_prompt_response(node_uid, key, parameters) + if resume_nodenet: + start_nodenetrunner(nodenet_uid) + # nodenet.is_active = resume_nodenet def get_available_recipes(): @@ -1286,7 +1443,7 @@ def run_recipe(nodenet_uid, name, parameters): params[key] = parameters[key] if name in custom_recipes: func = custom_recipes[name]['function'] - if cfg['micropsi2'].get('profile_runner'): + if runtime_config['micropsi2'].get('profile_runner'): import cProfile profiler = cProfile.Profile() profiler.enable() @@ -1294,7 +1451,7 @@ def run_recipe(nodenet_uid, name, parameters): ret = func(netapi, **params) if ret: result.update(ret) - if cfg['micropsi2'].get('profile_runner'): + if runtime_config['micropsi2'].get('profile_runner'): profiler.disable() import pstats import io @@ -1358,20 +1515,18 @@ def parsemembers(members): if name.startswith('_'): continue if inspect.isroutine(thing): - argspec = inspect.getargspec(thing) - arguments = argspec.args[1:] - defaults = argspec.defaults or [] + sig = inspect.signature(thing) params = [] - diff = len(arguments) - len(defaults) - for i, arg in enumerate(arguments): - if i >= diff: + for key in sig.parameters: + if key == 'self': + continue + if sig.parameters[key].default != inspect.Signature.empty: params.append({ - 'name': arg, - 'default': defaults[i - diff] + 'name': key, + 'default': 
sig.parameters[key].default }) else: - params.append({'name': arg}) - + params.append({'name': key}) data[name] = params else: data[name] = None @@ -1413,36 +1568,48 @@ def parsemembers(members): return data -# --- end of API +def flow(nodenet_uid, source_uid, source_output, target_uid, target_input): + """ Link two flow_modules """ + nodenet = get_nodenet(nodenet_uid) + return True, nodenet.flow(source_uid, source_output, target_uid, target_input) + + +def unflow(nodenet_uid, source_uid, source_output, target_uid, target_input): + """ Removes the link between the given flow_modules """ + nodenet = get_nodenet(nodenet_uid) + return True, nodenet.unflow(source_uid, source_output, target_uid, target_input) -def filter_native_modules(engine=None): - data = {} - for key in native_modules: - if native_modules[key].get('engine') is None or engine is None or engine == native_modules[key]['engine']: - data[key] = native_modules[key].copy() - return data +# --- end of API -def crawl_definition_files(path, type="definition"): + +def crawl_definition_files(path, datatype="definition"): """Traverse the directories below the given path for JSON definitions of nodenets and worlds, and return a dictionary with the signatures of these nodenets or worlds. 
""" - + from micropsi_core.world.world import WORLD_VERSION + from micropsi_core.nodenet.nodenet import NODENET_VERSION result = {} os.makedirs(path, exist_ok=True) - for user_directory_name, user_directory_names, file_names in os.walk(path): + if os.path.relpath(user_directory_name, start=os.path.join(PERSISTENCY_PATH, "nodenets")).startswith("__autosave__"): + continue for definition_file_name in file_names: if definition_file_name.endswith(".json"): try: filename = os.path.join(user_directory_name, definition_file_name) - with open(filename) as file: + with open(filename, encoding="utf-8") as file: data = parse_definition(json.load(file), filename) - result[data.uid] = data + if datatype == 'world' and data.version != WORLD_VERSION: + logging.getLogger("system").warning("Wrong Version of environment data in file %s" % definition_file_name) + elif datatype == 'nodenet' and data.version != NODENET_VERSION: + logging.getLogger("system").warning("Wrong Version of agent data in file %s" % definition_file_name) + else: + result[data.uid] = data except ValueError: - logging.getLogger('system').warn("Invalid %s data in file '%s'" % (type, definition_file_name)) + logging.getLogger('system').warning("Invalid %s data in file '%s'" % (datatype, definition_file_name)) except IOError: - logging.getLogger('system').warn("Could not open %s data file '%s'" % (type, definition_file_name)) + logging.getLogger('system').warning("Could not open %s data file '%s'" % (datatype, definition_file_name)) return result @@ -1458,6 +1625,7 @@ def parse_definition(json, filename=None): if "worldadapter" in json: result['worldadapter'] = json["worldadapter"] result['world'] = json["world"] + result['worldadapter_config'] = json.get('worldadapter_config', {}) if "world_type" in json: result['world_type'] = json['world_type'] if "settings" in json: @@ -1466,99 +1634,198 @@ def parse_definition(json, filename=None): result['config'] = json['config'] if 'use_modulators' in json: 
result['use_modulators'] = json['use_modulators'] + if 'version' in json: + result['version'] = json['version'] + else: + result['version'] = 1 return Bunch(**result) # Set up the MicroPsi runtime def load_definitions(): global nodenet_data, world_data - nodenet_data = crawl_definition_files(path=os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY), type="nodenet") - world_data = crawl_definition_files(path=os.path.join(PERSISTENCY_PATH, WORLD_DIRECTORY), type="world") + nodenet_data = crawl_definition_files(path=os.path.join(PERSISTENCY_PATH, NODENET_DIRECTORY), datatype="nodenet") + world_data = crawl_definition_files(path=os.path.join(PERSISTENCY_PATH, WORLD_DIRECTORY), datatype="world") if not world_data: # create a default world for convenience. - uid = tools.generate_uid() + uid = generate_uid() filename = os.path.join(PERSISTENCY_PATH, WORLD_DIRECTORY, uid + '.json') - world_data[uid] = Bunch(uid=uid, name="default", version=1, filename=filename) - with open(filename, 'w+') as fp: + world_data[uid] = Bunch(uid=uid, name="default", version=1, filename=filename, owner="admin", world_type="DefaultWorld") + with open(filename, 'w+', encoding="utf-8") as fp: fp.write(json.dumps(world_data[uid], sort_keys=True, indent=4)) + for uid in world_data: + try: + world_data[uid].supported_worldadapters = get_world_class_from_name(world_data[uid].get('world_type', "DefaultWorld")).get_supported_worldadapters() + except KeyError: + pass return nodenet_data, world_data -# set up all worlds referred to in the world_data: -def init_worlds(world_data): - global worlds - for uid in world_data: - if "world_type" in world_data[uid]: - try: - worlds[uid] = get_world_class_from_name(world_data[uid].world_type)(**world_data[uid]) - except TypeError: - worlds[uid] = world.World(**world_data[uid]) - except AttributeError as err: - logging.getLogger('system').warn("Unknown world_type: %s (%s)" % (world_data[uid].world_type, str(err))) - except: - logging.getLogger('system').warn("Can not 
instantiate World \"%s\": %s" % (world_data[uid].name, str(sys.exc_info()[1]))) - else: - worlds[uid] = world.World(**world_data[uid]) - return worlds +def load_user_files(path, resourcetype, errors=[]): + global native_modules, custom_recipes + import shutil + if os.path.isdir(path): + for f in os.listdir(path): + if not f.startswith('.'): + abspath = os.path.join(path, f) + if f == "__pycache__": + shutil.rmtree(abspath) + elif f.startswith("_"): + continue + err = None + if os.path.isdir(abspath): + errors.extend(load_user_files(abspath, resourcetype, errors=[])) + elif f.endswith(".py"): + if resourcetype == 'recipes' or resourcetype == 'operations': + err = parse_recipe_or_operations_file(abspath, resourcetype) + elif resourcetype == 'nodetypes': + err = parse_native_module_file(abspath) + if err: + errors.append(err) + return errors -def load_user_files(path, reload_nodefunctions=False, errors=[]): - global native_modules, custom_recipes +def load_world_files(path, errors=[]): for f in os.listdir(path): if not f.startswith('.') and f != '__pycache__': abspath = os.path.join(path, f) err = None if os.path.isdir(abspath): - errors.extend(load_user_files(path=abspath, reload_nodefunctions=reload_nodefunctions, errors=[])) - elif f == 'nodetypes.json': - err = parse_native_module_file(abspath) - elif f == 'recipes.py': - err = parse_recipe_or_operations_file(abspath, reload_nodefunctions) - elif f == 'nodefunctions.py' and reload_nodefunctions: - err = reload_nodefunctions_file(abspath) - elif f == 'operations.py': - err = parse_recipe_or_operations_file(abspath, reload_nodefunctions) + errors.extend(load_world_files(path=abspath, errors=[])) + elif f == 'worlds.json': + err = parse_world_definitions(abspath) if err: - errors.append(err) + errors.extend(err) return errors -def parse_native_module_file(path): - global native_modules +def parse_world_definitions(path): + import importlib + import inspect + global world_classes, worldadapter_classes, 
worldobject_classes + from micropsi_core.world.world import World + from micropsi_core.world.worldobject import WorldObject + from micropsi_core.world.worldadapter import WorldAdapter + base_path = os.path.dirname(path) + errors = [] with open(path) as fp: - category = os.path.relpath(os.path.dirname(path), start=RESOURCE_PATH) try: - modules = json.load(fp) + data = json.load(fp) except ValueError: - return "Nodetype data in %s/nodetypes.json not well-formed." % category - for key in modules: - modules[key]['path'] = os.path.join(os.path.dirname(path), 'nodefunctions.py') - modules[key]['category'] = category - if key in native_modules: - logging.getLogger("system").warning("Native module names must be unique. %s is not." % key) - native_modules[key] = modules[key] + return "World data in %s/worlds.json not well formed" % path + worldfiles = data.get('worlds', []) + worldadapterfiles = data.get('worldadapters', []) + worldobjectfiles = data.get('worldobjects', []) + dependencies = data.get('dependencies', []) + for dep in dependencies: + dep_path = os.path.join(base_path, dep) + sys.path.append(dep_path) + + for w in worldfiles: + relpath = os.path.relpath(os.path.join(base_path, w), start=WORLD_PATH) + sys.path.append(base_path) + name = w[:-3] + try: + loader = importlib.machinery.SourceFileLoader(name, os.path.join(base_path, w)) + wmodule = loader.load_module() + for name, cls in inspect.getmembers(wmodule, inspect.isclass): + if World in inspect.getmro(cls) and name != "World": + world_classes[name] = cls + logging.getLogger("system").debug("Found world %s " % name) + except Exception as e: + errors.append("%s when importing world file %s: %s" % (e.__class__.__name__, relpath, str(e))) + post_mortem() + for w in worldadapterfiles: + relpath = os.path.relpath(os.path.join(base_path, w), start=WORLD_PATH) + name = w[:-3] + try: + loader = importlib.machinery.SourceFileLoader(name, os.path.join(base_path, w)) + wmodule = loader.load_module() + for name, cls in 
inspect.getmembers(wmodule, inspect.isclass): + if WorldAdapter in inspect.getmro(cls) and not inspect.isabstract(cls): + worldadapter_classes[name] = cls + # errors.append("Name collision in worldadapters: %s defined more than once" % name) + except Exception as e: + errors.append("%s when importing worldadapter file %s: %s" % (e.__class__.__name__, relpath, str(e))) + post_mortem() + for w in worldobjectfiles: + relpath = os.path.relpath(os.path.join(base_path, w), start=WORLD_PATH) + name = w[:-3] + try: + loader = importlib.machinery.SourceFileLoader(name, os.path.join(base_path, w)) + wmodule = loader.load_module() + for name, cls in inspect.getmembers(wmodule, inspect.isclass): + if WorldObject in inspect.getmro(cls) and WorldAdapter not in inspect.getmro(cls): + worldobject_classes[name] = cls + # errors.append("Name collision in worldadapters: %s defined more than once" % name) + except Exception as e: + errors.append("%s when importing worldobject file %s: %s" % (e.__class__.__name__, relpath, str(e))) + post_mortem() + return errors or None -def parse_recipe_or_operations_file(path, reload=False, category_overwrite=False): +def parse_native_module_file(path): + import importlib + global native_modules + import os + try: + base_path = os.path.join(RESOURCE_PATH, 'nodetypes') + relpath = os.path.relpath(path, start=base_path) + loader = importlib.machinery.SourceFileLoader(relpath, path) + module = loader.load_module() + if hasattr(module, 'nodetype_definition') and type(module.nodetype_definition) == dict: + category = os.path.relpath(os.path.dirname(path), start=base_path) + if category == '.': + category = '' + moduledef = nodedef_sanity_check(module.nodetype_definition) + moduledef['path'] = path + moduledef['category'] = category + if moduledef['name'] in native_modules: + logging.getLogger("system").warning("Native module names must be unique. %s is not." 
% moduledef['name']) + native_modules[moduledef['name']] = moduledef + except Exception as e: + post_mortem() + return "%s when importing nodetype file %s: %s" % (e.__class__.__name__, relpath, str(e)) + + +def nodedef_sanity_check(nodetype_definition): + """ catch some common errors in nodetype definitions """ + nd = nodetype_definition + + if nd.get('flow_module', False): + # chedck for mismatch between nr of inputdims and nr of inputs + n_in = len(nd.get('inputs', [])) + n_indims = len(nd.get('inputdims', [])) + if n_in != n_indims: + raise Exception('Node takes %s inputs but %s inputdims have been given' % (n_in, n_indims)) + + return nodetype_definition + + +def parse_recipe_or_operations_file(path, mode, category_overwrite=False): global custom_recipes import importlib import inspect - category = category_overwrite or os.path.relpath(os.path.dirname(path), start=RESOURCE_PATH) + base_path = os.path.join(RESOURCE_PATH, mode) + category = category_overwrite or os.path.relpath(os.path.dirname(path), start=base_path) if category == '.': - category == '' # relapth in rootfolder - relpath = os.path.relpath(path, start=RESOURCE_PATH) + category = '' # relapth in rootfolder + if path.startswith(base_path): + relpath = os.path.relpath(path, start=base_path) + else: + # builtin operations get their filename as relpath + relpath, _ = os.path.splitext(os.path.basename(path)) name = os.path.basename(path)[:-3] - mode = 'recipes' if os.path.basename(path).startswith('recipes') else 'operations' - try: loader = importlib.machinery.SourceFileLoader(name, path) recipes = loader.load_module() # recipes = __import__(pyname, fromlist=['recipes']) # importlib.reload(sys.modules[pyname]) - except SyntaxError as e: - return "%s in %s file %s, line %d" % (e.__class__.__name__, mode, relpath, e.lineno) + except Exception as e: + post_mortem() + return "%s when importing %s file %s: %s" % (e.__class__.__name__, mode, relpath, str(e)) for name, module in inspect.getmembers(recipes, 
inspect.ismodule): if hasattr(module, '__file__') and module.__file__.startswith(RESOURCE_PATH): @@ -1571,21 +1838,16 @@ def parse_recipe_or_operations_file(path, reload=False, category_overwrite=False # import from another file of the same mode. ignore, to avoid # false duplicate-function-name alerts continue - argspec = inspect.getargspec(func) - if mode == 'recipes': - arguments = argspec.args[1:] - elif mode == 'operations': - arguments = argspec.args[2:] - defaults = argspec.defaults or [] + signature = inspect.signature(func) params = [] - diff = len(arguments) - len(defaults) - for i, arg in enumerate(arguments): - if i >= diff: - default = defaults[i - diff] - else: + for param in signature.parameters: + if param == 'netapi' or (param == 'selection' and mode == 'operations'): + continue + default = signature.parameters[param].default + if default == inspect.Signature.empty: default = None params.append({ - 'name': arg, + 'name': param, 'default': default }) if mode == 'recipes' and name in custom_recipes and id(func) != id(custom_recipes[name]['function']): @@ -1609,51 +1871,82 @@ def parse_recipe_or_operations_file(path, reload=False, category_overwrite=False custom_operations[name] = data -def reload_nodefunctions_file(path): - import importlib - import inspect - +def reload_code(): + global native_modules, custom_recipes, custom_operations, world_classes, worldadapter_classes + from micropsi_core.world.world import DefaultWorld + from micropsi_core.world.worldadapter import Default + from micropsi_core.world.worldobject import TestObject + import sys + for mod in list(sys.modules.keys()): + if hasattr(sys.modules[mod], '__file__'): + path = sys.modules[mod].__file__ + if path.startswith(RESOURCE_PATH) or path.startswith(WORLD_PATH): + del sys.modules[mod] + world_classes['DefaultWorld'] = DefaultWorld + worldadapter_classes['Default'] = Default + worldobject_classes['TestObject'] = TestObject try: - loader = 
importlib.machinery.SourceFileLoader("nodefunctions", path) - nodefuncs = loader.load_module() - for name, module in inspect.getmembers(nodefuncs, inspect.ismodule): - if hasattr(module, '__file__') and module.__file__.startswith(RESOURCE_PATH): - loader = importlib.machinery.SourceFileLoader(name, module.__file__) - loader.load_module() - except SyntaxError as e: - relpath = os.path.relpath(path, start=RESOURCE_PATH) - return "%s in nodefunction file %s, line %d" % (e.__class__.__name__, relpath, e.lineno) - - -def reload_native_modules(): - # stop nodenets, save state - global native_modules, custom_recipes, custom_operations + from micropsi_core.world.worldadapter import DefaultArray + worldadapter_classes['DefaultArray'] = DefaultArray + except ImportError: + pass native_modules = {} custom_recipes = {} custom_operations = {} runners = {} errors = [] + # load builtins: - from micropsi_core.nodenet.native_modules import nodetypes - native_modules.update(nodetypes) - operationspath = os.path.abspath('micropsi_core/nodenet/operations/') + operationspath = os.path.dirname(os.path.realpath(__file__)) + '/nodenet/operations/' for file in os.listdir(operationspath): import micropsi_core.nodenet.operations if file != '__init__.py' and not file.startswith('.') and os.path.isfile(os.path.join(operationspath, file)): - err = parse_recipe_or_operations_file(os.path.join(operationspath, file), category_overwrite=file[:-3]) + err = parse_recipe_or_operations_file(os.path.join(operationspath, file), 'operations', category_overwrite=file[:-3]) if err: errors.append(err) - + # stop nodenets for uid in nodenets: if nodenets[uid].is_active: runners[uid] = True - nodenets[uid].is_active = False - errors.extend(load_user_files(RESOURCE_PATH, reload_nodefunctions=True, errors=[])) + stop_nodenetrunner(uid) + # nodenets[uid].is_active = False + + # load code-directory + if RESOURCE_PATH not in sys.path: + sys.path.insert(0, RESOURCE_PATH) + + for key in ['nodetypes', 'recipes', 
'operations']: + basedir = os.path.join(RESOURCE_PATH, key) + if os.path.isdir(basedir): + errors.extend(load_user_files(basedir, key, errors=[])) + + errors.extend(load_world_files(WORLD_PATH, errors=[])) + + # reload native modules in nodenets for nodenet_uid in nodenets: - nodenets[nodenet_uid].reload_native_modules(filter_native_modules(nodenets[nodenet_uid].engine)) + nodenets[nodenet_uid].reload_native_modules(native_modules) + + # reload worlds: + for world_uid in worlds: + wtype = worlds[world_uid].__class__.__name__ + if wtype in world_classes: + data = worlds[world_uid].data.copy() + agents = data.pop('agents') + worlds[world_uid].__del__() + del micropsi_core.runtime.worlds[world_uid] + worlds[world_uid] = world_classes[wtype](**world_data[world_uid]) + worlds[world_uid].initialize_world(data) + for uid in agents: + if uid in nodenets: + worlds[world_uid].register_nodenet(agents[uid]['type'], uid, agents[uid]['name'], nodenets[uid].metadata['worldadapter_config']) + nodenets[uid].worldadapter_instance = worlds[world_uid].agents[uid] + else: + worlds[world_uid].logger.warning("World definition for world %s gone, destroying." 
% str(worlds[world_uid])) + # restart previously active nodenets for uid in runners: - nodenets[uid].is_active = True + start_nodenetrunner(uid) + # nodenets[uid].is_active = True if len(errors) == 0: return True, [] @@ -1661,51 +1954,99 @@ def reload_native_modules(): return False, errors -def initialize(persistency_path=None, resource_path=None): - global PERSISTENCY_PATH, RESOURCE_PATH, configs, logger, runner +def runtime_info(): + return { + "version": runtime_config['micropsi2']['version'], + "persistency_directory": PERSISTENCY_PATH, + "agent_directory": RESOURCE_PATH, + "world_directory": WORLD_PATH + } + + +def initialize(config=None): + global PERSISTENCY_PATH, RESOURCE_PATH, WORLD_PATH, AUTOSAVE_PATH + global runtime_config, runner_config, logger, runner, initialized, auto_save_intervals + + if config is None: + from configuration import config + + runtime_config = config + + PERSISTENCY_PATH = config['paths']['persistency_directory'] + RESOURCE_PATH = config['paths']['agent_directory'] + WORLD_PATH = config['paths']['world_directory'] - if persistency_path is None: - persistency_path = cfg['paths']['data_directory'] + sys.path.append(WORLD_PATH) - if resource_path is None: - resource_path = persistency_path + runner_config = ConfigurationManager(config['paths']['server_settings_path']) - PERSISTENCY_PATH = persistency_path - RESOURCE_PATH = resource_path + # create autosave-dir if not exists: + auto_save_intervals = config['micropsi2'].get('auto_save_intervals') + if auto_save_intervals is not None: + auto_save_intervals = sorted([int(x) for x in config['micropsi2']['auto_save_intervals'].split(',')], reverse=True) + AUTOSAVE_PATH = os.path.join(PERSISTENCY_PATH, "nodenets", "__autosave__") + os.makedirs(AUTOSAVE_PATH, exist_ok=True) - sys.path.append(resource_path) + # bring up plotting infrastructure + try: + import matplotlib + matplotlib.rcParams['webagg.port'] = int(config['micropsi2'].get('webagg_port', 6545)) + 
matplotlib.rcParams['webagg.open_in_browser'] = False + matplotlib.use('WebAgg') + + def plotter_initializer(): + from matplotlib import pyplot as plt + plt.show() - configs = config.ConfigurationManager(cfg['paths']['server_settings_path']) + plt_thread = threading.Thread(target=plotter_initializer, args=(), daemon=True) + plt_thread.start() + except ImportError: + pass if logger is None: logger = MicropsiLogger({ - 'system': cfg['logging']['level_system'], - 'world': cfg['logging']['level_world'] - }, cfg['logging'].get('logfile')) + 'system': config['logging']['level_system'], + 'world': config['logging']['level_world'] + }, config['logging'].get('logfile')) + try: + import theano + precision = config['theano']['precision'] + if precision == "32": + theano.config.floatX = "float32" + elif precision == "64": + theano.config.floatX = "float64" + else: # pragma: no cover + logging.getLogger("system").warning("Unsupported precision value from configuration: %s, falling back to float64", precision) + theano.config.floatX = "float64" + config['theano']['precision'] = "64" + except ImportError: + pass + + result, errors = reload_code() load_definitions() - init_worlds(world_data) - result, errors = reload_native_modules() for e in errors: logging.getLogger("system").error(e) + # shut tornado up + for key in ["tornado.application", "tornado.access", "tornado", "tornado.general"]: + logging.getLogger(key).setLevel(logging.ERROR) + # initialize runners # Initialize the threads for the continuous calculation of nodenets and worlds - if 'runner_timestep' not in configs: - configs['runner_timestep'] = 200 - configs.save_configs() - if 'runner_factor' not in configs: - configs['runner_factor'] = 2 - configs.save_configs() + if 'runner_timestep' not in runner_config: + runner_config['runner_timestep'] = 10 + if 'infguard' not in runner_config: + runner_config['runner_infguard'] = True + runner_config.save_configs() - set_runner_properties(configs['runner_timestep'], 
configs['runner_factor']) + set_runner_properties(runner_config['runner_timestep'], runner_config['runner_infguard']) runner['running'] = True if runner.get('runner') is None: runner['runner'] = MicropsiRunner() - if kill_runners not in signal_handler_registry: - add_signal_handler(kill_runners) - signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) + signal.signal(signal.SIGABRT, signal_handler) + initialized = True diff --git a/micropsi_core/tests/conftest.py b/micropsi_core/tests/conftest.py deleted file mode 100644 index 736095e4..00000000 --- a/micropsi_core/tests/conftest.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Central initialization of fixtures for Runtime etc. -""" -import pytest -from micropsi_core import runtime as micropsi - -DELETE_TEST_FILES_ON_EXIT = True - - -nn_uid = 'Testnet' - - -@pytest.yield_fixture(scope="function") -def fixed_nodenet(request, test_world, engine): - """ - A test nodenet filled with some example data (nodenet_data.py) - Structure: - - -> A1 -> A2 - / - S ACTA - \ - -> B1 -> B2 - - S: Sensor, brightness_l - A1: Pipe - A2: Pipe - B1: Pipe - B2: Pipe - ACTA: Activator, por - """ - from micropsi_core.tests.nodenet_data import fixed_nodenet_data - if engine == "theano_engine": - fixed_nodenet_data = fixed_nodenet_data.replace('Root', 's0001') - success, uid = micropsi.new_nodenet("Fixednet", engine=engine, worldadapter="Braitenberg", owner="Pytest User", world_uid=test_world, uid='fixed_test_nodenet') - micropsi.get_nodenet(uid) - micropsi.merge_nodenet(uid, fixed_nodenet_data, keep_uids=True) - micropsi.save_nodenet(uid) - yield uid - try: - micropsi.delete_nodenet(uid) - except: - pass diff --git a/micropsi_core/tests/nodenet_data.py b/micropsi_core/tests/nodenet_data.py deleted file mode 100644 index 70810e52..00000000 --- a/micropsi_core/tests/nodenet_data.py +++ /dev/null @@ -1,129 +0,0 @@ - -fixed_nodenet_data = """{ - "uid": "fixed_test_nodenet", - "links": { - "n0003:sub:gen:n0004": { - 
"certainty": 1, - "source_gate_name": "sub", - "source_node_uid": "n0003", - "target_node_uid": "n0004", - "target_slot_name": "gen", - "uid": "n0003:sub:gen:n0004", - "weight": 1 - }, - "n0001:por:gen:n0002": { - "certainty": 1, - "source_gate_name": "por", - "source_node_uid": "n0001", - "target_node_uid": "n0002", - "target_slot_name": "gen", - "uid": "n0001:por:gen:n0002", - "weight": 1 - }, - "n0005:gen:gen:n0003": { - "certainty": 1, - "source_gate_name": "gen", - "source_node_uid": "n0005", - "target_node_uid": "n0003", - "target_slot_name": "gen", - "uid": "n0005:gen:gen:n0003", - "weight": 1 - }, - "n0005:gen:gen:n0001": { - "certainty": 1, - "source_gate_name": "gen", - "source_node_uid": "n0005", - "target_node_uid": "n0001", - "target_slot_name": "gen", - "uid": "n0005:gen:gen:n0001", - "weight": 1 - } - }, - "name": "fixed", - "nodes": { - "n0001": { - "activation": 0, - "index": 2, - "name": "A1", - "parameters": {}, - "parent_nodespace": "Root", - "position": [ - 367, - 115 - ], - "type": "Pipe", - "uid": "n0001" - }, - "n0005": { - "activation": 0, - "index": 1, - "name": "S", - "parameters": { - "datasource": "brightness_l" - }, - "parent_nodespace": "Root", - "position": [ - 163, - 138 - ], - "type": "Sensor", - "uid": "n0005" - }, - "n0002": { - "activation": 0, - "index": 4, - "name": "A2", - "parameters": { - "foo": "23" - }, - "parent_nodespace": "Root", - "position": [ - 567, - 118 - ], - "type": "Pipe", - "uid": "n0002" - }, - "n0003": { - "activation": 0, - "index": 3, - "name": "B1", - "parameters": {}, - "parent_nodespace": "Root", - "position": [ - 367, - 296 - ], - "type": "Pipe", - "uid": "n0003" - }, - "n0006": { - "activation": 0, - "index": 6, - "name": "ACTA", - "parameters": { - "type": "por" - }, - "parent_nodespace": "Root", - "position": [ - 749, - 103 - ], - "type": "Activator", - "uid": "n0006" - }, - "n0004": { - "activation": 0, - "index": 5, - "name": "B2", - "parameters": {}, - "parent_nodespace": "Root", - "position": [ 
- 568, - 298 - ], - "type": "Pipe", - "uid": "n0004" - } - } -}""" \ No newline at end of file diff --git a/micropsi_core/tests/test_changing_definitions.py b/micropsi_core/tests/test_changing_definitions.py new file mode 100644 index 00000000..975bc36d --- /dev/null +++ b/micropsi_core/tests/test_changing_definitions.py @@ -0,0 +1,105 @@ + +import pytest + + +@pytest.mark.engine("theano_engine") +def test_chaning_defs_theano(runtime, test_nodenet, default_world, resourcepath): + import os + os.makedirs(os.path.join(resourcepath, 'nodetypes'), exist_ok=True) + + def write_flowmodule(filename, name): + with open(os.path.join(resourcepath, 'nodetypes', filename), 'w') as fp: + fp.write(""" +nodetype_definition = { + 'run_function_name': 'flowfunc', + 'name': '%s', + 'flow_module': True, + 'implementation': 'python', + 'inputs': ['Y'], + 'outputs': ['X'], + 'inputdims': ['2'] +} + +def flowfunc(Y, netapi, node, params): + import numpy as np + return np.ones((2,3)) +""" % name) + + def write_nativemodule(filename, name): + with open(os.path.join(resourcepath, 'nodetypes', filename), 'w') as fp: + fp.write(""" +nodetype_definition = { + 'nodefunction_name': 'nodefunc', + 'name': '%s', + 'slottypes': ['gen', 'foo', 'bar'], + 'gatetypes': ['gen', 'foo', 'bar'] +} + +def nodefunc(netapi, node, params): + pass +""" % name) + + def removedefs(*filenames): + for f in filenames: + os.remove(os.path.join(resourcepath, 'nodetypes', f)) + + write_flowmodule('foonode.py', 'foonode') + write_flowmodule('foo2node.py', 'foo2node') + write_nativemodule('barnode.py', 'barnode') + runtime.reload_code() + + netapi = runtime.nodenets[test_nodenet].netapi + foonode = netapi.create_node('foonode') + barnode = netapi.create_node('barnode') + neuron = netapi.create_node('Neuron') + foo2node = netapi.create_node('foo2node') + netapi.link(neuron, 'gen', foonode, 'gen') + netapi.link(neuron, 'gen', foo2node, 'gen') + netapi.link(neuron, 'gen', barnode, 'gen') + 
runtime.set_nodenet_properties(nodenet_uid=test_nodenet, world_uid=default_world, worldadapter="DefaultArray") + netapi.flow('worldadapter', 'vision', foonode, 'Y') + netapi.flow(foonode, 'X', foo2node, 'Y') + netapi.flow(foo2node, 'X', 'worldadapter', 'action') + runtime.save_nodenet(test_nodenet) + runtime.unload_nodenet(test_nodenet) + + # remove nativemodule + removedefs('barnode.py') + runtime.reload_code() + net = runtime.get_nodenet(test_nodenet) + assert type(net.netapi.get_node(foonode.uid)).__name__ == "FlowModule" + with pytest.raises(KeyError): + net.netapi.get_node(barnode.uid) + write_nativemodule('barnode.py', 'barnode') + runtime.unload_nodenet(test_nodenet) + + # remove a flowmodule + removedefs('foonode.py') + runtime.reload_code() + net = runtime.get_nodenet(test_nodenet) + foo2node = net.netapi.get_node(foo2node.uid) + assert type(foo2node).__name__ == "FlowModule" + assert type(net.netapi.get_node(barnode.uid)).__name__ == "TheanoNode" + assert foonode.uid not in foo2node.inputmap['Y'] + with pytest.raises(KeyError): + net.netapi.get_node(foonode.uid) + runtime.unload_nodenet(test_nodenet) + + # change native module to flowmodule and vice versa + write_flowmodule('barnode.py', 'barnode') + write_nativemodule('foo2node.py', 'foo2node') + runtime.reload_code() + net = runtime.get_nodenet(test_nodenet) + with pytest.raises(KeyError): + foo2node = net.netapi.get_node(foo2node.uid) + with pytest.raises(KeyError): + barnode = net.netapi.get_node(barnode.uid) + with pytest.raises(KeyError): + net.netapi.get_node(foonode.uid) + newbarnode = net.netapi.create_node("barnode") + newfoo2node = net.netapi.create_node("foo2node") + neuron = net.netapi.get_node(neuron.uid) + net.netapi.link(neuron, 'gen', newbarnode, 'sub') + net.netapi.link(neuron, 'gen', newfoo2node, 'foo') + net.netapi.flow('worldadapter', 'vision', newbarnode, 'Y') + net.netapi.flow(newbarnode, 'X', 'worldadapter', 'action') diff --git a/micropsi_core/tests/test_code_reload.py 
b/micropsi_core/tests/test_code_reload.py new file mode 100644 index 00000000..95413d14 --- /dev/null +++ b/micropsi_core/tests/test_code_reload.py @@ -0,0 +1,109 @@ + + +def test_code_reload(runtime, test_nodenet, resourcepath): + import os + os.makedirs(os.path.join(resourcepath, 'nodetypes', 'library'), exist_ok=True) + os.makedirs(os.path.join(resourcepath, 'dummyworld'), exist_ok=True) + os.makedirs(os.path.join(resourcepath, 'shared_utils'), exist_ok=True) + + nodetypef = os.path.join(resourcepath, 'nodetypes', 'testnode.py') + foof = os.path.join(resourcepath, 'nodetypes', 'library', 'foo.py') + barf = os.path.join(resourcepath, 'nodetypes', 'library', 'bar.py') + + worldjsonf = os.path.join(resourcepath, 'dummyworld', 'worlds.json') + worldf = os.path.join(resourcepath, 'dummyworld', 'dummyworld.py') + worldsharedf = os.path.join(resourcepath, 'shared_utils', 'stuff.py') + + def write_resources(nodevalues, datatarget_name, worldvalues): + with open(nodetypef, 'w') as fp: + fp.write(""" +nodetype_definition = { + 'doc': 'calculates stuff', + 'nodefunction_name': 'testnode', + 'name': 'testnode', + 'slottypes': ['gen'], + 'gatetypes': ['gen'], +} + +from nodetypes.library.foo import module_level + + +def testnode(netapi, node): + from nodetypes.library.foo import inline, get_bar + val = 1 + module_level + inline + get_bar() + node.get_gate('gen').gate_function(val) +""") + with open(foof, 'w') as fp: + fp.write(""" +module_level = %d +inline = %d +def get_bar(): + from nodetypes.library.bar import magicnumber + return magicnumber +""" % (nodevalues[0], nodevalues[1])) + with open(barf, 'w') as fp: + fp.write("magicnumber=%d" % nodevalues[2]) + + with open(worldjsonf, 'w') as fp: + fp.write("""{"worlds": ["dummyworld.py"],"worldadapters": ["dummyworld.py"]}""") + with open(worldf, 'w') as fp: + fp.write("""from micropsi_core.world.world import World +from micropsi_core.world.worldadapter import WorldAdapter +from shared_utils.stuff import variable + +class 
DummyWorld(World): + supported_worldadapters=['DummyWA'] + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.var = variable + self.inline = %d + +class DummyWA(WorldAdapter): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.add_datasource("foo") + self.add_datasource("bar") + self.add_datatarget("%s") + def update_data_sources_and_targets(self): + from shared_utils.stuff import get_values + values = get_values() + self.datasources['foo'] = values[0] + self.datasources['bar'] = values[1] +""" % (worldvalues[0], datatarget_name)) + with open(worldsharedf, 'w') as fp: + fp.write("""variable = %d +def get_values(): + return %d, %d""" % (worldvalues[1], worldvalues[2], worldvalues[3])) + + write_resources([3, 5, 7], "target", [13, 15, 17, 19]) + res, errors = runtime.reload_code() + # assert res + + res, wuid = runtime.new_world("dummyworld", "DummyWorld") + runtime.set_nodenet_properties(test_nodenet, world_uid=wuid, worldadapter="DummyWA") + + net = runtime.nodenets[test_nodenet] + netapi = net.netapi + node = netapi.create_node('testnode') + runtime.step_nodenet(test_nodenet) + assert node.get_gate('gen').activation == 1 + 3 + 5 + 7 + world = runtime.worlds[wuid] + assert world.inline == 13 + assert world.var == 15 + wa = net.worldadapter_instance + assert "target" in wa.datatargets + assert wa.get_datasource_value("foo") == 17 + assert wa.get_datasource_value("bar") == 19 + + write_resources([11, 13, 17], "foobar", [1, 3, 5, 7]) + runtime.reload_code() + node = netapi.get_node(node.uid) + runtime.step_nodenet(test_nodenet) + assert node.get_gate('gen').activation == 1 + 11 + 13 + 17 + world = runtime.worlds[wuid] + assert world.inline == 1 + assert world.var == 3 + wa = net.worldadapter_instance + assert "foobar" in wa.datatargets + assert wa.get_datasource_value("foo") == 5 + assert wa.get_datasource_value("bar") == 7 diff --git a/micropsi_core/tests/test_flowmodules.py 
b/micropsi_core/tests/test_flowmodules.py new file mode 100644 index 00000000..784860a8 --- /dev/null +++ b/micropsi_core/tests/test_flowmodules.py @@ -0,0 +1,1187 @@ +#!/usr/local/bin/python +# -*- coding: utf-8 -*- + + +import pytest +# skip these tests if numpy is not installed +pytest.importorskip("numpy") + +import numpy as np + + +def prepare(runtime, test_nodenet, default_world, resourcepath, wa_class=None): + """ Create a bunch of available flowmodules for the following tests """ + import os + foodir = os.path.join(resourcepath, "nodetypes", 'foobar') + os.makedirs(foodir) + with open(os.path.join(resourcepath, "nodetypes", "out12345.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "python", + "name": "out12345", + "run_function_name": "out12345", + "inputs": [], + "outputs": ["out"], + "inputdims": [], + "parameters": ["default_test"], + "parameter_defaults": {"default_test": "defaultvalue"} +} + +def out12345(netapi, node, parameters): + import numpy as np + assert parameters['default_test'] == 'defaultvalue' + return np.asarray([1,2,3,4,5]).astype(netapi.floatX) +""") + + with open(os.path.join(foodir, "Double.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "Double", + "build_function_name": "double", + "init_function_name": "double_init", + "inputs": ["inputs"], + "outputs": ["outputs"], + "inputdims": [1], + "parameters": ["test_param"], + "parameter_defaults": {"test_param": "defaultvalue"} +} + +def double_init(netapi, node, parameters): + assert nodetype_definition['name'] == 'Double' + node.initfunction_ran = True + assert parameters['test_param'] == 'defaultvalue' + +def double(inputs, netapi, node, parameters): + return inputs * 2 +""") + with open(os.path.join(resourcepath, "nodetypes", "Add.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "Add", + 
"build_function_name": "add", + "inputs": ["input1", "input2"], + "outputs": ["outputs"], + "inputdims": [1, 1] +} + +def add(input1, input2, netapi, node, parameters): + return input1 + input2 +""") + with open(os.path.join(resourcepath, "nodetypes", "Bisect.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "Bisect", + "build_function_name": "bisect", + "inputs": ["inputs"], + "outputs": ["outputs"], + "inputdims": [1] +} + +def bisect(inputs, netapi, node, parameters): + return inputs / 2 +""") + with open(os.path.join(resourcepath, "nodetypes", "Numpy.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "python", + "name": "Numpy", + "init_function_name": "numpyfunc_init", + "run_function_name": "numpyfunc", + "inputs": ["inputs"], + "outputs": ["outputs"], + "inputdims": [1], + "parameters": ["no_return_flag"] +} + +def numpyfunc_init(netapi, node, parameters): + node.initfunction_ran = True + +def numpyfunc(inputs, netapi, node, parameters): + import numpy as np + netapi.notify_user(node, "numpyfunc ran") + if parameters.get('no_return_flag') != 1: + ones = np.zeros_like(inputs) + ones[:] = 1.0 + return inputs + ones +""") + with open(os.path.join(resourcepath, "nodetypes", "Thetas.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "Thetas", + "init_function_name": "thetas_init", + "build_function_name": "thetas", + "parameters": ["weights_shape", "use_thetas"], + "inputs": ["X"], + "outputs": ["Y"], + "inputdims": [1] +} + +import theano + +def thetas_init(netapi, node, parameters): + import numpy as np + w_array = np.random.rand(parameters['weights_shape']).astype(netapi.floatX) + b_array = np.random.rand(parameters['weights_shape']).astype(netapi.floatX) + + node.set_theta('weights', w_array) + node.set_theta('bias', theano.shared(b_array)) + +def thetas(X, netapi, node, 
parameters): + if parameters.get('use_thetas'): + return X * node.get_theta('weights') + node.get_theta('bias') + else: + return X +""") + with open(os.path.join(resourcepath, "nodetypes", "TwoOutputs.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "TwoOutputs", + "build_function_name": "two_outputs", + "inputs": ["X"], + "outputs": ["A", "B"], + "inputdims": [1] +} + +def two_outputs(X, netapi, node, parameters): + return X, X+1 +""") + with open(os.path.join(resourcepath, "nodetypes", "TRPOOut.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "TRPOOut", + "build_function_name": "trpoout", + "inputs": ["X"], + "outputs": ["Y", "Z"], + "inputdims": [1], + "parameters": ["makeinf"], + "parameter_defaults": {"makeinf": "False"} +} + +def trpoout(X, netapi, node, parameters): + from theano import tensor as T + if parameters["makeinf"] == "False": + return [X, X+1, X*2], T.exp(X) + else: + return [X, X/0, X*2], T.exp(X) +""") + with open(os.path.join(resourcepath, "nodetypes", "TRPOIn.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "TRPOIn", + "build_function_name": "trpoin", + "inputs": ["Y", "Z"], + "outputs": ["A"], + "inputdims": ["list", 1] +} + +def trpoin(X, Y, netapi, node, parameters): + for thing in X: + Y += thing + return Y +""") + with open(os.path.join(resourcepath, "nodetypes", "TRPOInPython.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "python", + "name": "TRPOInPython", + "run_function_name": "trpoinpython", + "inputs": ["Y", "Z"], + "outputs": ["A"], + "inputdims": ["list", 1] +} + +def trpoinpython(X, Y, netapi, node, parameters): + for thing in X: + Y += thing + return Y +""") + + with open(os.path.join(resourcepath, "nodetypes", "infmaker.py"), 'w') as fp: + fp.write("""nodetype_definition 
= { + "flow_module": True, + "implementation": "python", + "name": "infmaker", + "run_function_name": "infmaker", + "inputs": [], + "outputs": ["A"], + "inputdims": [], + "parameters": ["what"], + "parameter_values": {"what": ["nan", "inf", "neginf"]}, + "parameter_defaults": {"what": "nan"} +} + +import numpy as np + +def infmaker(netapi, node, parameters): + data = np.ones(12).astype(netapi.floatX) + what = np.nan + if parameters['what'] == 'inf': + what = np.inf + elif parameters['what'] == 'neginf': + what = -np.inf + data[np.random.randint(0, 11)] = what + return data +""") + + with open(os.path.join(resourcepath, 'worlds.json'), 'w') as fp: + fp.write("""{"worlds":["flowworld.py"],"worldadapters":["flowworld.py"]}""") + + with open(os.path.join(resourcepath, 'flowworld.py'), 'w') as fp: + fp.write(""" +import numpy as np +from micropsi_core.world.world import World +from micropsi_core.world.worldadapter import ArrayWorldAdapter + +class FlowWorld(World): + supported_worldadapters = ["SimpleArrayWA"] + +class SimpleArrayWA(ArrayWorldAdapter): + def __init__(self, world, **kwargs): + super().__init__(world, **kwargs) + self.add_datasource("execute") + self.add_flow_datasource("foo", shape=(5)) + self.add_flow_datasource("vision", (6)) + self.add_flow_datasource("start", (1)) + self.add_datatarget("reset") + self.add_flow_datatarget("bar", shape=(5)) + self.add_flow_datatarget("motor", (6)) + self.add_flow_datatarget("stop", (1)) + + self.update_data_sources_and_targets() + + def update_data_sources_and_targets(self): + for key in self.flow_datatargets: + self.flow_datatarget_feedbacks[key] = np.copy(self.flow_datatargets[key]).astype(self.floatX) + for key in self.flow_datasources: + self.flow_datasources[key] = np.random.rand(len(self.flow_datasources[key])).astype(self.floatX) +""") + + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + runtime.reload_code() + + res, wuid = runtime.new_world("FlowWorld", "FlowWorld") + 
runtime.set_nodenet_properties(test_nodenet, worldadapter="SimpleArrayWA", world_uid=wuid) + worldadapter = nodenet.worldadapter_instance + + return nodenet, netapi, worldadapter + + +@pytest.mark.engine("theano_engine") +def test_flowmodule_definition(runtime, test_nodenet, default_world, resourcepath): + """ Basic definition and existance test """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + result, metadata = runtime.get_nodenet_metadata(test_nodenet) + assert 'Double' not in metadata['native_modules'] + assert metadata['flow_modules']['Double']['inputs'] == ["inputs"] + assert metadata['flow_modules']['Double']['outputs'] == ["outputs"] + assert metadata['flow_modules']['Double']['category'] == 'foobar' + flowmodule = netapi.create_node("Double", None, "Double") + assert not hasattr(flowmodule, 'initfunction_ran') + + nodenet.flow('worldadapter', 'foo', flowmodule.uid, "inputs") + nodenet.flow(flowmodule.uid, "outputs", 'worldadapter', 'bar') + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + + worldadapter.set_flow_datasource('foo', sources) + + # step & assert that nothing happened without sub-activation + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == np.zeros(5, dtype=nodenet.numpyfloatX)) + # assert len(nodenet.flowfunctions) == 0 + + # create activation source: + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', flowmodule, 'sub') + source.activation = 1 + + # assert len(nodenet.flowfunctions) == 1 + + # # step & assert that the initfunction and flowfunction ran + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == sources * 2) + assert hasattr(flowmodule, 'initfunction_ran') + + +@pytest.mark.engine("theano_engine") +def test_multiple_flowgraphs(runtime, test_nodenet, default_world, resourcepath): + """ Testing a flow from datasources 
to datatargets """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + add = netapi.create_node("Add", None, "Add") + bisect = netapi.create_node("Bisect", None, "Bisect") + + # create a first graph + # link datasources to double & add + nodenet.flow('worldadapter', 'foo', double.uid, "inputs") + nodenet.flow('worldadapter', 'foo', add.uid, "input2") + # link double to add: + nodenet.flow(double.uid, "outputs", add.uid, "input1") + + # link add to datatargets + nodenet.flow(add.uid, "outputs", 'worldadapter', 'bar') + + # assert len(nodenet.flowfunctions) == 0 + + # create a second graph + nodenet.flow('worldadapter', 'foo', bisect.uid, "inputs") + nodenet.flow(bisect.uid, "outputs", 'worldadapter', 'bar') + + # assert len(nodenet.flowfunctions) == 0 + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + + worldadapter.set_flow_datasource('foo', sources) + + # create activation source + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + + # link to first graph: + netapi.link(source, 'gen', add, 'sub') + # assert len(nodenet.flowfunctions) == 1 + + # step & assert that only the first graph ran + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == sources * 3) + + # link source to second graph: + netapi.link(source, 'gen', bisect, 'sub') + # assert len(nodenet.flowfunctions) == 2 + worldadapter.flow_datatargets['bar'] = np.zeros_like(worldadapter.get_flow_datatarget('bar')) + + nodenet.step() + assert np.allclose(worldadapter.get_flow_datatarget('bar'), sources * 3.5) + + +@pytest.mark.engine("theano_engine") +def test_disconnect_flowmodules(runtime, test_nodenet, default_world, resourcepath): + """ test disconnecting flowmodules """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + 
double = netapi.create_node("Double", None, "Double") + add = netapi.create_node("Add", None, "Add") + source = netapi.create_node("Neuron", None, "Source") + + # link datasources to double & add + nodenet.flow('worldadapter', 'foo', double.uid, "inputs") + nodenet.flow('worldadapter', 'foo', add.uid, "input2") + # link double to add: + nodenet.flow(double.uid, "outputs", add.uid, "input1") + # link add to datatargets + nodenet.flow(add.uid, "outputs", 'worldadapter', 'bar') + netapi.link(source, 'gen', add, 'sub') + # have one connected graph + + # assert len(nodenet.flowfunctions) == 1 + + # unlink double from add + netapi.unflow(double, "outputs", add, "input1") + + # unlink add from datatargets + netapi.unflow(add, "outputs", "worldadapter", "bar") + + # we still have one graph, but it doesn't do anything + # assert len(nodenet.flowfunctions) == 1 + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == np.zeros_like(worldadapter.get_flow_datatarget('bar'))) + + +@pytest.mark.engine("theano_engine") +def test_diverging_flowgraph(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + add = netapi.create_node("Add", None, "Add") + bisect = netapi.create_node("Bisect", None, "Bisect") + + # link sources to bisect + nodenet.flow('worldadapter', 'foo', bisect.uid, "inputs") + # link bisect to double: + nodenet.flow(bisect.uid, "outputs", double.uid, "inputs") + # link bisect to add: + nodenet.flow(bisect.uid, "outputs", add.uid, "input1") + # link sources to add: + nodenet.flow('worldadapter', 'foo', add.uid, "input2") + + # link double and add to targets: + nodenet.flow(double.uid, "outputs", 'worldadapter', 'bar') + nodenet.flow(add.uid, 
"outputs", 'worldadapter', 'bar') + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + # create activation source + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + + # link activation source to double + netapi.link(source, 'gen', double, 'sub') + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == sources) + worldadapter.flow_datatargets['bar'] = np.zeros_like(worldadapter.get_flow_datatarget('bar')) + + # unlink double, link add: + netapi.unlink(source, 'gen', double, 'sub') + netapi.link(source, 'gen', add, 'sub') + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == sources * 1.5) + + +@pytest.mark.engine("theano_engine") +def test_converging_flowgraphs(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double1 = netapi.create_node("Double", None, "Double") + double2 = netapi.create_node("Double", None, "Double") + add = netapi.create_node("Add", None, "Add") + + # link sources + nodenet.flow('worldadapter', 'foo', double1.uid, "inputs") + nodenet.flow('worldadapter', 'foo', double2.uid, "inputs") + + # link both doubles to add + nodenet.flow(double1.uid, "outputs", add.uid, "input1") + nodenet.flow(double2.uid, "outputs", add.uid, "input2") + + # link add to targets. 
+ nodenet.flow(add.uid, "outputs", 'worldadapter', 'bar') + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + # create activation source + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + + # link activation source to double + netapi.link(source, 'gen', add, 'sub') + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == sources * 4) + + +@pytest.mark.engine("theano_engine") +def test_flowmodule_persistency(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + thetas = netapi.create_node("Thetas", None, "Thetas") + thetas.set_parameter("weights_shape", 5) + thetas.set_parameter("use_thetas", True) + + nodenet.flow('worldadapter', 'foo', double.uid, "inputs") + nodenet.flow(double.uid, 'outputs', thetas.uid, "X") + nodenet.flow(thetas.uid, "Y", 'worldadapter', 'bar') + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', thetas, 'sub') + source.activation = 1 + custom_theta = np.random.rand(5).astype(netapi.floatX) + thetas.set_theta("weights", custom_theta) + + assert double.initfunction_ran + + sources = np.zeros((5), dtype=netapi.floatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + nodenet.step() + + result = worldadapter.get_flow_datatarget('bar') + + assert np.allclose(result, sources * 2 * thetas.get_theta("weights").get_value() + thetas.get_theta("bias").get_value()) + + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + worldadapter = nodenet.worldadapter_instance + worldadapter.set_flow_datasource('foo', 
sources) + thetas = netapi.get_node(thetas.uid) + + assert np.allclose(thetas.get_theta("weights").get_value(), custom_theta) + nodenet.step() + assert np.allclose(worldadapter.get_flow_datatarget('bar'), result) + assert netapi.get_node(double.uid).initfunction_ran + # also assert, that the edge-keys are preserved: + # this would raise an exception otherwise + netapi.unflow(netapi.get_node(double.uid), 'outputs', netapi.get_node(thetas.uid), 'X') + + # assert that custom thetas survive reloadCode: + runtime.reload_code() + assert np.allclose(netapi.get_node(thetas.uid).get_theta('weights').get_value(), custom_theta) + + +@pytest.mark.engine("theano_engine") +def test_flowmodule_reload_code_behaviour(runtime, test_nodenet, default_world, resourcepath): + import os + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + node = netapi.create_node("Thetas", None, "Thetas", weights_shape=5) + double = netapi.create_node("Double", None, "Double") + netapi.flow('worldadapter', 'foo', double, 'inputs') + netapi.flow(double, 'outputs', 'worldadapter', 'bar') + node.ensure_initialized() + weights = node.get_theta('weights').get_value() + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', double, 'sub') + source.activation = 1 + with open(os.path.join(resourcepath, "nodetypes", "Thetas.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "Thetas", + "init_function_name": "thetas_init", + "build_function_name": "thetas", + "parameters": ["weights_shape", "use_thetas"], + "inputs": ["Y"], + "outputs": ["Z"], + "inputdims": [1] +} + +import theano + +def thetas_init(netapi, node, parameters): + import numpy as np + w_array = np.random.rand(parameters['weights_shape']).astype(netapi.floatX) + b_array = np.random.rand(parameters['weights_shape']).astype(netapi.floatX) + node.initfunction_ran = 'yep' + 
node.set_theta('weights', w_array) + node.set_theta('bias', theano.shared(b_array)) + +def thetas(Y, netapi, node, parameters): + if parameters.get('use_thetas'): + return Y * node.get_theta('weights') + node.get_theta('bias') + else: + return Y +""") + with open(os.path.join(resourcepath, "nodetypes", "foobar", "Double.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "theano", + "name": "Double", + "build_function_name": "double", + "init_function_name": "double_init", + "inputs": ["inputs"], + "outputs": ["outputs"], + "inputdims": [1] +} + +def double_init(netapi, node, parameters): + node.initfunction_ran = True + +def double(inputs, netapi, node, parameters): + return inputs * 4 +""") + runtime.reload_code() + node = netapi.get_node(node.uid) + assert node.inputs == ["Y"] + assert node.outputs == ["Z"] + assert not np.all(weights == node.get_theta('weights').get_value()) + assert weights.shape == node.get_theta('weights').get_value().shape + assert node.initfunction_ran == 'yep' + worldadapter = nodenet.worldadapter_instance + sources = np.zeros((5), dtype=worldadapter.floatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget("bar") == sources * 4) + + +@pytest.mark.engine("theano_engine") +def test_delete_flowmodule(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double1 = netapi.create_node("Double", None, "Double") + double2 = netapi.create_node("Double", None, "Double") + add = netapi.create_node("Add", None, "Add") + bisect = netapi.create_node("Bisect", None, "Bisect") + + # build graph: + netapi.flow(bisect, "outputs", add, "input1") + netapi.flow(add, "outputs", double1, "inputs") + netapi.flow(add, "outputs", double2, "inputs") + netapi.flow('worldadapter', 'foo', bisect, "inputs") + 
netapi.flow('worldadapter', 'foo', add, "input2") + netapi.flow(double1, "outputs", 'worldadapter', 'bar') + netapi.flow(double2, "outputs", 'worldadapter', 'bar') + + source = netapi.create_node("Neuron", None, "Source") + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + netapi.link(source, 'gen', double1, 'sub') + netapi.link(source, 'gen', double2, 'sub') + # assert len(nodenet.flowfunctions) == 2 + + netapi.delete_node(add) + + # no possible connections anymore + # assert len(nodenet.flowfunctions) == 0 + + assert not nodenet.flow_module_instances[bisect.uid].is_output_connected() + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == np.zeros(5)) + + +@pytest.mark.engine("theano_engine") +def test_link_large_graph(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + bisect = netapi.create_node("Bisect", None, "Bisect") + add = netapi.create_node("Add", None, "Add") + + # create activation source: + source = netapi.create_node("Neuron", None) + source.activation = 1 + + nodenet.flow('worldadapter', 'foo', bisect.uid, "inputs") + nodenet.flow(bisect.uid, "outputs", double.uid, "inputs") + + nodenet.flow('worldadapter', 'foo', add.uid, "input1") + nodenet.flow(add.uid, "outputs", 'worldadapter', 'bar') + + nodenet.flow(double.uid, "outputs", add.uid, "input2") + + netapi.link(source, 'gen', add, 'sub') + # assert len(nodenet.flowfunctions) == 1 + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == sources * 2) + + 
+@pytest.mark.engine("theano_engine") +def test_python_flowmodules(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + py = netapi.create_node("Numpy", None, "Numpy") + bisect = netapi.create_node("Bisect", None, "Bisect") + + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + + assert not hasattr(py, 'initfunction_ran') + + netapi.flow('worldadapter', 'foo', double, "inputs") + netapi.flow(double, "outputs", py, "inputs") + netapi.flow(py, "outputs", bisect, "inputs") + netapi.flow(bisect, "outputs", 'worldadapter', 'bar') + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == 0) + + # netapi.link(source, 'gen', bisect, 'sub') + + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == 0) + + netapi.link(source, 'gen', bisect, 'sub') + + nodenet.step() + # ((x * 2) + 1) / 2 == x + .5 + assert np.all(worldadapter.get_flow_datatarget('bar') == sources + 0.5) + assert py.initfunction_ran + + +@pytest.mark.engine("theano_engine") +def test_compile_flow_subgraph(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + bisect = netapi.create_node("Bisect", None, "Bisect") + + netapi.flow(double, "outputs", bisect, "inputs") + + func, ins, outs = nodenet.compile_flow_subgraph([double.uid, bisect.uid]) + + assert np.all(func(inputs=[1, 2, 3, 4]) == np.asarray([1, 2, 3, 4], dtype=nodenet.numpyfloatX)) + + +@pytest.mark.engine("theano_engine") +def test_get_callable_flowgraph_bridges_numpy_gaps(runtime, test_nodenet, 
default_world, resourcepath): + """ Asserts that callable_flowgraph wraps everything in one callable, symbolic or numeric """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + py = netapi.create_node("Numpy", None, "Numpy") + bisect = netapi.create_node("Bisect", None, "Bisect") + + netapi.flow(double, "outputs", py, "inputs") + netapi.flow(py, "outputs", bisect, "inputs") + + func = netapi.get_callable_flowgraph([bisect, double, py]) + + assert np.all(func(inputs=[1, 2, 3, 4]) == np.asarray([1.5, 2.5, 3.5, 4.5], dtype=nodenet.numpyfloatX)) + + +@pytest.mark.engine("theano_engine") +def test_collect_thetas(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + module = netapi.create_node("Thetas", None, "module") + module.set_parameter('use_thetas', True) + module.set_parameter('weights_shape', 5) + + netapi.flow('worldadapter', 'foo', module, "X") + netapi.flow(module, "Y", 'worldadapter', 'bar') + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + source = netapi.create_node("Neuron", None) + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + netapi.link(source, 'gen', module, 'sub') + + nodenet.step() + + collected = netapi.collect_thetas([module]) + assert len(collected) == 2 + # assert collect sorts alphabetically + assert collected[0] == module.get_theta('bias') + assert collected[1] == module.get_theta('weights') + + result = sources * module.get_theta('weights').get_value() + module.get_theta('bias').get_value() + assert np.allclose(worldadapter.get_flow_datatarget('bar'), result) + + func = netapi.get_callable_flowgraph([module], use_different_thetas=True) + + x = np.ones(5).astype(netapi.floatX) + weights = 
np.random.rand(5).astype(netapi.floatX) + bias = np.ones(5).astype(netapi.floatX) + + result = func(thetas=[bias, weights], X=x) + + assert np.all(result == x * weights + bias) + + +@pytest.mark.engine("theano_engine") +def test_flow_edgecase(runtime, test_nodenet, default_world, resourcepath): + """ Tests a structural edge case: diverging and again converging graph with a numpy node in one arm""" + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + twoout = netapi.create_node("TwoOutputs", None, "twoout") + double = netapi.create_node("Double", None, "double") + numpy = netapi.create_node("Numpy", None, "numpy") + add = netapi.create_node("Add", None, "add") + + netapi.flow(twoout, "A", double, "inputs") + netapi.flow(twoout, "B", numpy, "inputs") + netapi.flow(double, "outputs", add, "input1") + netapi.flow(numpy, "outputs", add, "input2") + + function = netapi.get_callable_flowgraph([twoout, double, numpy, add]) + + x = np.array([1, 2, 3], dtype=netapi.floatX) + result = np.array([5, 8, 11], dtype=netapi.floatX) + assert np.all(function(X=x) == result) + + +@pytest.mark.engine("theano_engine") +def test_flow_trpo_modules(runtime, test_nodenet, default_world, resourcepath): + """ Test the trpo modules, that can return list-outputs """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + trpoout = netapi.create_node("TRPOOut", None, "TRPOOut") + trpoin = netapi.create_node("TRPOIn", None, "TRPOIn") + + netapi.flow(trpoout, "Y", trpoin, "Y") + netapi.flow(trpoout, "Z", trpoin, "Z") + + function = netapi.get_callable_flowgraph([trpoin, trpoout]) + + x = np.array([1, 2, 3], dtype=netapi.floatX) + result = sum([np.exp(x), x, x * 2, x + 1]) + assert np.all(function(X=x) == result) + + netapi.delete_node(trpoin) + trpoinpy = netapi.create_node("TRPOInPython", None, "TRPOInPython") + + netapi.flow(trpoout, "Y", trpoinpy, "Y") + netapi.flow(trpoout, "Z", trpoinpy, "Z") + + function = 
netapi.get_callable_flowgraph([trpoinpy, trpoout]) + assert np.all(function(X=x) == result) + + +@pytest.mark.engine("theano_engine") +def test_none_output_skips_following_graphs(runtime, test_nodenet, default_world, resourcepath): + """ Tests the "staudamm" functionality: a graph can return None, thus preventing graphs + depending on this output as their input from being executed, even if they are requested """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + with netapi.flowbuilder: + double = netapi.create_node("Double", None, "Double") + py = netapi.create_node("Numpy", None, "Numpy") + bisect = netapi.create_node("Bisect", None, "Bisect") + + netapi.flow("worldadapter", "foo", double, "inputs") + netapi.flow(double, "outputs", py, "inputs") + netapi.flow(py, "outputs", bisect, "inputs") + netapi.flow(bisect, "outputs", "worldadapter", "bar") + + source = netapi.create_node("Neuron", None, "Source") + netapi.link(source, 'gen', source, 'gen') + source.activation = 1 + netapi.link(source, 'gen', py, 'sub') + netapi.link(source, 'gen', bisect, 'sub') + # assert len(nodenet.flowfunctions) == 0 + + sources = np.zeros((5), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('foo', sources) + + py.set_parameter('no_return_flag', 1) + + nodenet.step() + # assert that the bisect function did not run + assert np.all(worldadapter.get_flow_datatarget('bar') == np.zeros(5)) + # but python did + assert nodenet.consume_user_prompt()['msg'] == 'numpyfunc ran' + # and assert that you can get that info from the sur-gates: + assert bisect.get_gate('sur').activation == 0 + assert py.get_gate('sur').activation == 1 + + py.set_parameter('no_return_flag', 0) + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == (2 * sources + 1) / 2) + + +@pytest.mark.engine("theano_engine") +def test_shadow_flowgraph(runtime, test_nodenet, default_world, resourcepath): + 
nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + node1 = netapi.create_node("Thetas", None, "node1") + node1.set_parameter('use_thetas', True) + node1.set_parameter('weights_shape', 5) + node1.set_state('foo', 'bar') + node2 = netapi.create_node("Thetas", None, "node2") + node2.set_parameter('use_thetas', False) + node2.set_parameter('weights_shape', 5) + + netapi.flow(node1, "Y", node2, "X") + + function = netapi.get_callable_flowgraph([node1, node2]) + + x = np.array([1, 2, 3, 4, 5], dtype=netapi.floatX) + result = function(X=x)[0] + + copies = netapi.shadow_flowgraph([node1, node2]) + + copyfunction = netapi.get_callable_flowgraph([copies[0], copies[1]]) + + assert np.all(copyfunction(X=x) == result) + assert netapi.collect_thetas(copies) == netapi.collect_thetas([node1, node2]) + assert copies[0].get_state('foo') == 'bar' + assert not copies[1].get_parameter('use_thetas') + + # change original + node2.set_parameter('use_thetas', True) + node1.set_state('foo', 'baz') + + # recompile, assert change took effect + assert copies[1].get_parameter('use_thetas') + assert copies[0].get_state('foo') == 'baz' + function = netapi.get_callable_flowgraph([node1, node2]) + result2 = function(X=x)[0] + assert np.all(result2 != result) + + # recompile copy, assert change took effect here as well. 
+ copyfunc = netapi.get_callable_flowgraph([copies[0], copies[1]]) + assert np.all(copyfunc(X=x) == result2) + + # change back, save and reload and assert the copy + # still returs the original's value + node2.set_parameter('use_thetas', False) + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + copies = [netapi.get_node(copies[0].uid), netapi.get_node(copies[1].uid)] + assert not copies[1].get_parameter('use_thetas') + + +@pytest.mark.engine("theano_engine") +def test_naming_collision_in_callable_subgraph(runtime, test_nodenet, default_world, resourcepath): + """ Asserts that compiling a graph that has naming collisions raises an Exception, + asserts that unique_inputs_names fixes the collision""" + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + bisect = netapi.create_node("Bisect", None, "Bisect") + add = netapi.create_node("Add", None, "Add") + + netapi.flow(double, "outputs", add, "input1") + netapi.flow(bisect, "outputs", add, "input2") + + with pytest.raises(RuntimeError): + netapi.get_callable_flowgraph([double, bisect, add]) + + function = netapi.get_callable_flowgraph([double, bisect, add], use_unique_input_names=True) + kwargs = { + "%s_inputs" % double.uid: [1.], + "%s_inputs" % bisect.uid: [1.] 
+ } + assert function(**kwargs) == [2.5] + + +@pytest.mark.engine("theano_engine") +def test_filter_subgraph_outputs(runtime, test_nodenet, default_world, resourcepath): + """ Tests requesting only specific outputs from a subgraph """ + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + double = netapi.create_node("Double", None, "Double") + twoout = netapi.create_node("TwoOutputs", None, "TwoOutputs") + + netapi.flow(twoout, "A", double, "inputs") + + function = netapi.get_callable_flowgraph([twoout, double]) + assert function(X=[2.]) == [3., 4.] + assert "B of %s" % twoout.uid in function.__doc__ + + function = netapi.get_callable_flowgraph([twoout, double], requested_outputs=[(double.uid, "outputs")]) + assert function(X=[2.]) == [4.] + assert "B of %s" % twoout.uid not in function.__doc__ + + +@pytest.mark.engine("theano_engine") +def test_connect_flow_modules_to_structured_flow_datasource(runtime, test_nodenet, default_world, resourcepath): + import os + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + # get ndoetype defs + assert nodenet.native_module_definitions['datatargets']['is_autogenerated'] + nodenet.native_modules['datatargets'].inputs == ['motor', 'stop'] + nodenet.native_modules['datatargets'].outputs == [] + assert nodenet.native_module_definitions['datasources']['is_autogenerated'] + nodenet.native_modules['datasources'].inputs == [] + nodenet.native_modules['datasources'].outputs == ['vision', 'start'] + in_node_found = False + out_node_found = False + + assert len(nodenet.flow_module_instances) == 2 + for uid, node in nodenet.flow_module_instances.items(): + if node.name == 'datatargets': + assert node.type == 'datatargets' + in_node_found = True + assert node.outputs == [] + assert node.inputs == ['bar', 'motor', 'stop'] + assert node.get_data()['type'] == 'datatargets' + elif node.name == 'datasources': + assert node.type == 'datasources' + out_node_found 
= True + assert node.outputs == ['foo', 'vision', 'start'] + assert node.inputs == [] + assert node.get_data()['type'] == 'datasources' + + assert in_node_found + assert out_node_found + + sources = np.zeros((6), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('vision', sources) + worldadapter.set_flow_datasource('start', np.asarray([0.73]).astype(nodenet.numpyfloatX)) + + double = netapi.create_node("Double", None, "Double") + netapi.flow('worldadapter', 'vision', double, 'inputs') + netapi.flow(double, 'outputs', 'worldadapter', 'motor') + netapi.flow('worldadapter', 'start', 'worldadapter', 'stop') + + source = netapi.create_node("Neuron", None) + source.activation = 1 + netapi.link(source, 'gen', double, 'sub') + + runtime.step_nodenet(test_nodenet) + assert worldadapter.datatarget_values[0] == 0 + assert np.all(worldadapter.get_flow_datatarget_feedback('motor') == sources * 2) + assert np.allclose(worldadapter.get_flow_datatarget_feedback('stop'), [0.73]) + + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + + nodenet = runtime.nodenets[test_nodenet] + worldadapter = nodenet.worldadapter_instance + + assert len(nodenet.flow_module_instances) == 3 + + sources = np.zeros((6), dtype=nodenet.numpyfloatX) + sources[:] = np.random.randn(*sources.shape) + worldadapter.set_flow_datasource('vision', sources) + worldadapter.set_flow_datasource('start', np.asarray([0.64]).astype(nodenet.numpyfloatX)) + runtime.step_nodenet(test_nodenet) + assert np.all(worldadapter.get_flow_datatarget_feedback('motor') == np.zeros(6)) + assert np.allclose(worldadapter.get_flow_datatarget_feedback('stop'), [0.64]) + + with open(os.path.join(resourcepath, 'flowworld.py'), 'w') as fp: + fp.write(""" +import numpy as np +from micropsi_core.world.world import World +from micropsi_core.world.worldadapter import ArrayWorldAdapter + +class FlowWorld(World): + supported_worldadapters = ["SimpleArrayWA"] + 
+class SimpleArrayWA(ArrayWorldAdapter): + def __init__(self, world, **kwargs): + super().__init__(world, **kwargs) + self.add_datasource("execute") + self.add_flow_datasource("renamed", shape=(5)) + self.add_flow_datasource("vision", (6)) + self.add_datatarget("reset") + self.add_flow_datatarget("renamed", shape=(5)) + self.add_flow_datatarget("motor", (6)) + self.update_data_sources_and_targets() + + def update_data_sources_and_targets(self): + pass +""") + runtime.reload_code() + assert len(nodenet.flow_module_instances) == 3 + sources = nodenet.get_node(nodenet.worldadapter_flow_nodes['datasources']) + targets = nodenet.get_node(nodenet.worldadapter_flow_nodes['datatargets']) + assert sources.outputs == ["renamed", "vision"] + assert targets.inputs == ["renamed", "motor"] + assert nodenet.get_node(double.uid).inputmap['inputs'] == (sources.uid, 'vision') + assert (double.uid, 'inputs') in nodenet.get_node(sources.uid).outputmap['vision'] + assert (targets.uid, 'motor') in nodenet.get_node(double.uid).outputmap['outputs'] + + +@pytest.mark.engine("theano_engine") +def test_flownode_output_only(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + out = netapi.create_node("out12345") + source = netapi.create_node("Neuron") + source.activation = 1 + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', out, 'sub') + netapi.flow(out, 'out', 'worldadapter', 'bar') + nodenet.step() + assert np.all(worldadapter.get_flow_datatarget('bar') == [1, 2, 3, 4, 5]) + + +@pytest.mark.engine("theano_engine") +def test_flownode_generate_netapi_fragment(runtime, test_nodenet, default_world, resourcepath): + """ Takes the above-tested edgecase, creates a recipe via generate_netapi_fragment + and runs the result""" + import os + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + twoout = netapi.create_node("TwoOutputs", None, 
"twoout") + double = netapi.create_node("Double", None, "double") + numpy = netapi.create_node("Numpy", None, "numpy") + add = netapi.create_node("Add", None, "add") + nodes = [twoout, double, numpy, add] + + netapi.flow(twoout, "A", double, "inputs") + netapi.flow(twoout, "B", numpy, "inputs") + netapi.flow(double, "outputs", add, "input1") + netapi.flow(numpy, "outputs", add, "input2") + + fragment = runtime.generate_netapi_fragment(test_nodenet, [n.uid for n in nodes]) + + res, pastenet = runtime.new_nodenet('pastnet', "theano_engine") + code = "def foo(netapi):\n " + "\n ".join(fragment.split('\n')) + # save the fragment as recipe & run + with open(os.path.join(resourcepath, 'recipes', 'test.py'), 'w+') as fp: + fp.write(code) + runtime.reload_code() + runtime.run_recipe(pastenet, 'foo', {}) + pastnetapi = runtime.get_nodenet(pastenet).netapi + + function = pastnetapi.get_callable_flowgraph(netapi.get_nodes()) + + x = np.array([1, 2, 3], dtype=netapi.floatX) + result = np.array([5, 8, 11], dtype=netapi.floatX) + assert np.all(function(X=x) == result) + + +@pytest.mark.engine("theano_engine") +def test_flow_inf_guard(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + infmaker = netapi.create_node("infmaker") + add = netapi.create_node("Add") + netapi.flow(infmaker, "A", add, "input1") + netapi.flow('worldadapter', 'foo', add, "input2") + netapi.flow(add, 'outputs', 'worldadapter', 'bar') + source = netapi.create_node("Neuron") + source.activation = 1 + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', add, 'sub') + with pytest.raises(ValueError) as excinfo: + runtime.step_nodenet(test_nodenet) + assert "output A" in str(excinfo.value) + assert "infmaker" in str(excinfo.value) + assert "NAN value" in str(excinfo.value) + + infmaker.set_parameter('what', 'inf') + with pytest.raises(ValueError) as excinfo: + runtime.step_nodenet(test_nodenet) + 
assert "INF value" in str(excinfo.value) + + worldadapter.flow_datasources['foo'][3] = np.nan + with pytest.raises(ValueError) as excinfo: + runtime.step_nodenet(test_nodenet) + assert type(worldadapter).__name__ in str(excinfo.value) + assert "foo" in str(excinfo.value) + + +@pytest.mark.engine("theano_engine") +def test_flow_inf_guard_on_list_outputs(runtime, test_nodenet, default_world, resourcepath): + nodenet, netapi, worldadapter = prepare(runtime, test_nodenet, default_world, resourcepath) + + trpoout = netapi.create_node("TRPOOut", None, "TRPOOut") + trpoout.set_parameter("makeinf", "True") + trpoin = netapi.create_node("TRPOIn", None, "TRPOIn") + + netapi.flow(trpoout, "Y", trpoin, "Y") + netapi.flow(trpoout, "Z", trpoin, "Z") + netapi.flow('worldadapter', 'foo', trpoout, "X") + netapi.flow(trpoin, 'A', 'worldadapter', 'bar') + source = netapi.create_node("Neuron") + source.activation = 1 + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', trpoin, 'sub') + with pytest.raises(ValueError) as excinfo: + runtime.step_nodenet(test_nodenet) + assert "INF value in" in str(excinfo.value) + assert "output A of graph" in str(excinfo.value) diff --git a/micropsi_core/tests/test_island.py b/micropsi_core/tests/test_island.py deleted file mode 100644 index c5aab50c..00000000 --- a/micropsi_core/tests/test_island.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/local/bin/python -# -*- coding: utf-8 -*- - -""" - -""" -from micropsi_core import runtime -from micropsi_core import runtime as micropsi - -__author__ = 'joscha' -__date__ = '29.10.12' - - -def test_island(resourcepath): - success, world_uid = micropsi.new_world("Misland", "Island", owner="tester") - assert success - world = runtime.worlds[world_uid] - assert world.__class__.__name__ == 'Island' - runtime.add_worldobject(world_uid, "Lightsource", (10, 10), uid='foobar', name='foobar', parameters={}) - runtime.save_world(world_uid) - runtime.revert_world(world_uid) - world = 
runtime.worlds[world_uid] - assert world.objects["foobar"].__class__.__name__ == 'Lightsource' - assert world.objects["foobar"].position == [10, 10] - assert world.data['objects']['foobar']['position'] == [10, 10] - assert world.__class__.__name__ == 'Island' - runtime.set_worldobject_properties(world_uid, "foobar", position=(5, 5)) - assert world.objects["foobar"].position == (5, 5) - assert world.data['objects']['foobar']['position'] == (5, 5) - assert runtime.get_world_view(world_uid, -1)['objects']['foobar']['position'] == (5, 5) - runtime.delete_world(world_uid) diff --git a/micropsi_core/tests/test_node.py b/micropsi_core/tests/test_node.py index 1d8f30df..b776e391 100644 --- a/micropsi_core/tests/test_node.py +++ b/micropsi_core/tests/test_node.py @@ -6,26 +6,33 @@ """ from micropsi_core.nodenet.node import Nodetype -from micropsi_core.nodenet.nodefunctions import register, concept -from micropsi_core import runtime as micropsi +from micropsi_core.nodenet.nodefunctions import neuron, concept import pytest +plotting_available = False +try: + import matplotlib + plotting_available = True +except ImportError: + pass + + @pytest.mark.engine("theano_engine") -def test_nodetype_function_definition_overwrites_default_function_name_theano(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) - nodetype = nodenet.get_standard_nodetype_definitions()['Register'].copy() +def test_nodetype_function_definition_overwrites_default_function_name_theano(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) + nodetype = nodenet.get_standard_nodetype_definitions()['Neuron'].copy() foo = Nodetype(nodenet=nodenet, **nodetype) - assert foo.nodefunction == register + assert foo.nodefunction == neuron nodetype['nodefunction_definition'] = 'return 17' foo = Nodetype(nodenet=nodenet, **nodetype) - assert foo.nodefunction != register + assert foo.nodefunction != neuron assert foo.nodefunction(nodenet, None) == 17 @pytest.mark.engine("dict_engine") -def 
test_nodetype_function_definition_overwrites_default_function_name(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def test_nodetype_function_definition_overwrites_default_function_name(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) nodetype = nodenet.get_standard_nodetype_definitions()['Concept'].copy() foo = Nodetype(nodenet=nodenet, **nodetype) assert foo.nodefunction == concept @@ -35,22 +42,120 @@ def test_nodetype_function_definition_overwrites_default_function_name(fixed_nod assert foo.nodefunction(nodenet, None) == 17 -def test_node_states(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) - node = nodenet.get_node(node) - assert node.get_state('foobar') is None - node.set_state('foobar', 'bazbaz') - assert node.get_state('foobar') == 'bazbaz' - node.set_state('foobar', 42) - assert node.get_state('foobar') == 42 - - -def test_entity_positions_as_tuples(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_node_positions_as_tuples(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) api = nodenet.netapi node = api.create_node("Pipe", None, "node1") nodespace = api.create_nodespace(None, "nodespace1") node.position = (23, 42) - nodespace.position = (13, 23, 42) assert node.position == [23, 42, 0] - assert nodespace.position == [13, 23, 42] + + +@pytest.mark.engine("theano_engine") +def test_fat_native_modules(runtime, test_nodenet, resourcepath): + import os + import numpy as np + with open(os.path.join(resourcepath, 'nodetypes', 'PhatNM.py'), 'w') as fp: + fp.write(""" +nodetype_definition = { + "name": "PhatNM", + "slottypes": ["gen", "sub", "sur", "A_in", "B_in"], + "gatetypes": ["gen", "sub", "sur", "A_out", "B_out"], + "nodefunction_name": "phatNM", + "symbol": "F", + "dimensionality": { + "gates": { + "A_out": 768, + "B_out": 13 + }, + "slots": { + "A_in": 1024, + "B_in": 62 + } + } +} + +def phatNM(netapi, node, **_): + pass +""") + + runtime.reload_code() + 
netapi = runtime.nodenets[test_nodenet].netapi + node = netapi.create_node("PhatNM", None, "phatty") + node.take_slot_activation_snapshot() + + # test get_slot_activation + data = node.get_slot_activations() + assert len(data) == 1024 + 62 + 3 # fat_slots + gen/sub/sur + new_activation = np.random.rand(768 + 13 + 3) # fat gates + gen/sub/sur + + # test set_gate_activation + node.set_gate_activations(new_activation) + target = netapi.create_node("Neuron", None, "Target") + for g in node.get_gate_types(): + netapi.link(node, g, target, 'gen') + runtime.step_nodenet(test_nodenet) + assert target.activation > 0 + + # test saving/loading data + node.save_data(new_activation) + assert np.all(node.load_data() == new_activation) + + # test persistency + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + netapi = runtime.nodenets[test_nodenet].netapi + node = netapi.get_node(node.uid) + target = netapi.get_node(target.uid) + assert np.all(node.load_data() == new_activation) + + # test setting gate details, get_gate_activation + node.set_gate_configuration("A_out0", "sigmoid") + config = node.get_gate_configuration() + assert config['A_out0']['gatefunction'] == 'sigmoid' + runtime.step_nodenet(test_nodenet) + act = node.get_gate_activations() + assert act[3] == 0.5 + assert np.all(act[4:] == 0) + + # test delivery to frontend + netapi.link(target, 'gen', node, 'A_in580') + pipe = netapi.create_node("Pipe", None, "pipe") + netapi.link_with_reciprocal(pipe, node, 'subsur') + data = runtime.nodenets[test_nodenet].get_nodes() + nodedata = data['nodes'][node.uid] + assert len(nodedata['gate_activations'].keys()) == 5 + assert 'gen' in nodedata['gate_activations'] + assert len(nodedata['links']['A_out0']) == 1 # all to same node + assert 'A_out1' not in nodedata['links'] + assert data['nodes'][target.uid]['links']['gen'][0]['target_slot_name'] == 'A_in0' + assert nodedata['links']['sur'][0]['target_node_uid'] == pipe.uid + assert 
nodedata['links']['sur'][0]['target_slot_name'] == 'sur' + assert data['nodes'][pipe.uid]['links']['sub'][0]['target_slot_name'] == 'sub' + + # test get nodetypes + result = runtime.get_available_native_module_types(test_nodenet)['PhatNM'] + assert result['dimensionality']['gates']['A_out0'] == 768 + assert result['dimensionality']['gates']['B_out0'] == 13 + assert result['dimensionality']['slots']['A_in0'] == 1024 + assert result['dimensionality']['slots']['B_in0'] == 62 + assert result['gatetypes'] == ['gen', 'sub', 'sur', 'A_out0', 'B_out0'] + assert result['is_highdimensional'] + + +@pytest.mark.skipif(not plotting_available, reason="requires matplotlib") +def test_node_show_plot_and_close_plot(runtime, test_nodenet): + from matplotlib import pyplot as plt + net = runtime.nodenets[test_nodenet] + netapi = net.netapi + node = netapi.create_node("Neuron", None, "Neuron") + fig = plt.figure(figsize=(3, 2)) + node.show_plot(fig) + assert net.figures[node.uid] == [fig] + netapi.delete_node(node) + assert node.uid not in net.figures + node = netapi.create_node("Neuron", None, "Neuron") + node.show_plot(fig) + runtime.unload_nodenet(test_nodenet) + assert plt.get_fignums() == [] + diff --git a/micropsi_core/tests/test_node_activation.py b/micropsi_core/tests/test_node_activation.py index 98278014..ede2ce7c 100644 --- a/micropsi_core/tests/test_node_activation.py +++ b/micropsi_core/tests/test_node_activation.py @@ -5,76 +5,92 @@ Tests for node activation propagation and gate arithmetic """ -from micropsi_core import runtime as micropsi - -def prepare(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def prepare(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi - source = netapi.create_node("Register", None, "Source") + source = netapi.create_node("Neuron", None, "Source") netapi.link(source, "gen", source, "gen") source.activation = 1 nodenet.step() - register = netapi.create_node("Register", None) + register 
= netapi.create_node("Neuron", None) netapi.link(source, "gen", register, "gen") return nodenet, netapi, source, register -def test_gate_arithmetics_propagation(fixed_nodenet): +def test_gate_arithmetics_propagation(runtime, test_nodenet): # propagate activation, expect it to show up at the gen gate - net, netapi, source, register = prepare(fixed_nodenet) + net, netapi, source, register = prepare(runtime, test_nodenet) net.step() assert register.get_gate("gen").activation == 1 -def test_gate_arithmetics_maximum(fixed_nodenet): +def test_gate_arithmetics_maximum(runtime, test_nodenet): # set maximum, expect the cutoff to work - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gate_parameter("gen", "maximum", 0.5) + net, netapi, source, register = prepare(runtime, test_nodenet) + params = {'maximum': 0.5} + register.set_gate_configuration("gen", "threshold", params) net.step() assert register.get_gate("gen").activation == 0.5 -def test_gate_arithmetics_minimum(fixed_nodenet): +def test_gate_arithmetics_minimum(runtime, test_nodenet): # set minimum, expect it to show up - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gate_parameter("gen", "maximum", 2) - register.set_gate_parameter("gen", "minimum", 1.5) + net, netapi, source, register = prepare(runtime, test_nodenet) + params = { + "maximum": 2, + "minimum": 1.5, + } + register.set_gate_configuration("gen", "threshold", params) + runtime.save_nodenet(test_nodenet) net.step() assert register.get_gate("gen").activation == 1.5 + runtime.revert_nodenet(test_nodenet) + net = runtime.nodenets[test_nodenet] + net.step() + assert net.get_node(register.uid).get_gate("gen").activation == 1.5 -def test_gate_arithmetics_threshold(fixed_nodenet): +def test_gate_arithmetics_threshold(runtime, test_nodenet): # set threshold, expect it to mute the node - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gate_parameter("gen", "maximum", 2) - 
register.set_gate_parameter("gen", "threshold", 1.5) + net, netapi, source, register = prepare(runtime, test_nodenet) + params = { + "maximum": 2, + "threshold": 1.5, + } + register.set_gate_configuration("gen", "threshold", params) net.step() assert register.get_gate("gen").activation == 0 -def test_gate_arithmetics_amplification(fixed_nodenet): +def test_gate_arithmetics_amplification(runtime, test_nodenet): # set maximum and amplification, expect amplification to be applied after maximum - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gate_parameter("gen", "maximum", 10) - register.set_gate_parameter("gen", "amplification", 10) + net, netapi, source, register = prepare(runtime, test_nodenet) + params = { + "maximum": 10, + "amplification": 10, + } + register.set_gate_configuration("gen", "threshold", params) net.step() assert register.get_gate("gen").activation == 10 -def test_gate_arithmetics_amplification_and_threshold(fixed_nodenet): +def test_gate_arithmetics_amplification_and_threshold(runtime, test_nodenet): # set maximum, amplification and threshold, expect the threshold to mute the node despite amplification - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gate_parameter("gen", "maximum", 10) - register.set_gate_parameter("gen", "amplification", 10) - register.set_gate_parameter("gen", "threshold", 2) + net, netapi, source, register = prepare(runtime, test_nodenet) + params = { + "maximum": 10, + "amplification": 10, + "threshold": 2, + } + register.set_gate_configuration("gen", "threshold", params) net.step() assert register.get_gate("gen").activation == 0 -def test_gate_arithmetics_directional_activator_amplification(fixed_nodenet): +def test_gate_arithmetics_directional_activator_amplification(runtime, test_nodenet): # set maximum and threshold with a directional activator in place - net, netapi, source, register = prepare(fixed_nodenet) + net, netapi, source, register = prepare(runtime, test_nodenet) 
activator = netapi.create_node("Activator", None) activator.set_parameter('type', 'sub') @@ -82,15 +98,18 @@ def test_gate_arithmetics_directional_activator_amplification(fixed_nodenet): testpipe = netapi.create_node("Pipe", None) netapi.link(source, "gen", testpipe, "sub", 1) - testpipe.set_gate_parameter("sub", "maximum", 10) - testpipe.set_gate_parameter("sub", "threshold", 0) + params = { + "maximum": 10, + "threshold": 0, + } + register.set_gate_configuration("gen", "threshold", params) net.step() assert testpipe.get_gate("sub").activation == 5 -def test_gate_arithmetics_directional_activator_muting(fixed_nodenet): +def test_gate_arithmetics_directional_activator_muting(runtime, test_nodenet): # have the directional activator mute the node - net, netapi, source, register = prepare(fixed_nodenet) + net, netapi, source, register = prepare(runtime, test_nodenet) activator = netapi.create_node("Activator", None) activator.set_parameter('type', 'sub') @@ -98,15 +117,18 @@ def test_gate_arithmetics_directional_activator_muting(fixed_nodenet): testpipe = netapi.create_node("Pipe", None) netapi.link(source, "gen", testpipe, "sub", 1) - testpipe.set_gate_parameter("sub", "maximum", 10) - testpipe.set_gate_parameter("sub", "threshold", 0) + params = { + "maximum": 10, + "threshold": 0, + } + register.set_gate_configuration("gen", "threshold", params) net.step() assert testpipe.get_gate("sub").activation == 0 -def test_gate_arithmetics_directional_activator_threshold(fixed_nodenet): +def test_gate_arithmetics_directional_activator_threshold(runtime, test_nodenet): # have the directional activator amplify alpha above threshold - net, netapi, source, register = prepare(fixed_nodenet) + net, netapi, source, register = prepare(runtime, test_nodenet) activator = netapi.create_node("Activator", None) activator.set_parameter('type', 'sub') @@ -114,43 +136,94 @@ def test_gate_arithmetics_directional_activator_threshold(fixed_nodenet): testpipe = netapi.create_node("Pipe", None) 
netapi.link(source, "gen", testpipe, "sub", 1) - testpipe.set_gate_parameter("sub", "maximum", 10) - testpipe.set_gate_parameter("sub", "threshold", 1) + params = { + "maximum": 10, + "threshold": 1, + } + register.set_gate_configuration("gen", "threshold", params) net.step() assert testpipe.get_gate("sub").activation == 2 -def test_gatefunction_sigmoid(fixed_nodenet): +def test_gatefunction_sigmoid(runtime, test_nodenet): # set a node function for gen gates, expect it to be used from micropsi_core.nodenet.gatefunctions import sigmoid - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gatefunction_name("gen", "sigmoid") + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration("gen", "sigmoid", {'bias': 1.2}) + runtime.save_nodenet(test_nodenet) + net.step() + assert round(register.get_gate("gen").activation, 5) == round(sigmoid(1, 1.2), 5) + runtime.revert_nodenet(test_nodenet) + net = runtime.nodenets[test_nodenet] net.step() - assert round(register.get_gate("gen").activation, 5) == round(sigmoid(1, 0, 0), 5) + assert round(net.get_node(register.uid).get_gate("gen").activation, 5) == round(sigmoid(1, 1.2), 5) -def test_gatefunction_none_is_identity(fixed_nodenet): - from micropsi_core.nodenet.gatefunctions import identity - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gatefunction_name("gen", None) +def test_gatefunction_elu(runtime, test_nodenet): + from micropsi_core.nodenet.gatefunctions import elu + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration("gen", "elu", {'bias': 1.2}) + runtime.save_nodenet(test_nodenet) + net.step() + assert round(register.get_gate("gen").activation, 5) == round(elu(1, 1.2), 5) + runtime.revert_nodenet(test_nodenet) + net = runtime.nodenets[test_nodenet] net.step() - assert register.get_gate("gen").activation == identity(1, 0, 0) + assert round(net.get_node(register.uid).get_gate("gen").activation, 5) 
== round(elu(1, 1.2), 5) -def test_gatefunctions(fixed_nodenet): - # call every gatefunction once - import micropsi_core.nodenet.gatefunctions as funcs - assert funcs.absolute(-1., 0, 0) == 1 - assert funcs.one_over_x(2., 0, 0) == 0.5 - assert funcs.identity(1, 0, 0) == 1 - assert funcs.sigmoid(0, 0, 0) == 0.5 +def test_gatefunction_relu(runtime, test_nodenet): + from micropsi_core.nodenet.gatefunctions import relu + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration("gen", "relu", {'bias': 1.2}) + runtime.save_nodenet(test_nodenet) + net.step() + assert round(register.get_gate("gen").activation, 5) == round(relu(1, 1.2), 5) + runtime.revert_nodenet(test_nodenet) + net = runtime.nodenets[test_nodenet] + net.step() + assert round(net.get_node(register.uid).get_gate("gen").activation, 5) == round(relu(1, 1.2), 5) + + +def test_gatefunction_on_over_x(runtime, test_nodenet): + from micropsi_core.nodenet.gatefunctions import one_over_x + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration("gen", "one_over_x",) + runtime.save_nodenet(test_nodenet) + net.step() + assert round(register.get_gate("gen").activation, 5) == round(one_over_x(1), 5) + runtime.revert_nodenet(test_nodenet) + net = runtime.nodenets[test_nodenet] + net.step() + assert round(net.get_node(register.uid).get_gate("gen").activation) == round(one_over_x(1), 5) + + +def test_gatefunction_absolute(runtime, test_nodenet): + from micropsi_core.nodenet.gatefunctions import absolute + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration("gen", "absolute") + runtime.save_nodenet(test_nodenet) + net.step() + assert round(register.get_gate("gen").activation, 5) == round(absolute(1), 5) + runtime.revert_nodenet(test_nodenet) + net = runtime.nodenets[test_nodenet] + net.step() + assert round(net.get_node(register.uid).get_gate("gen").activation, 5) == round(absolute(1), 5) + + +def 
test_gatefunction_none_is_identity(runtime, test_nodenet): + from micropsi_core.nodenet.gatefunctions import identity + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration("gen", None) + net.step() + assert register.get_gate("gen").activation == identity(1) -def test_node_activation_is_gen_gate_activation(fixed_nodenet): +def test_node_activation_is_gen_gate_activation(runtime, test_nodenet): from micropsi_core.nodenet.gatefunctions import sigmoid - net, netapi, source, register = prepare(fixed_nodenet) - register.set_gatefunction_name('gen', 'sigmoid') - sig = round(sigmoid(1, 0, 0), 4) + net, netapi, source, register = prepare(runtime, test_nodenet) + register.set_gate_configuration('gen', 'sigmoid', {'bias': 1.3}) + sig = round(sigmoid(1, 1.3), 4) net.step() assert round(register.activation, 4) == sig assert round(register.get_gate('gen').activation, 4) == sig diff --git a/micropsi_core/tests/test_node_logic.py b/micropsi_core/tests/test_node_logic.py index 5bc15f2e..beec3f0a 100644 --- a/micropsi_core/tests/test_node_logic.py +++ b/micropsi_core/tests/test_node_logic.py @@ -5,22 +5,20 @@ Tests for node activation propagation and gate arithmetic """ -from micropsi_core import runtime as micropsi - -def prepare(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def prepare(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi - source = netapi.create_node("Register", None, "Source") + source = netapi.create_node("Neuron", None, "Source") netapi.link(source, "gen", source, "gen") source.activation = 1 nodenet.step() return nodenet, netapi, source -def test_node_logic_loop(test_nodenet): +def test_node_logic_loop(runtime, test_nodenet): # test gen looping behaviour - net, netapi, source = prepare(test_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) net.step() assert source.get_gate("gen").activation == 1 net.step() @@ -30,22 +28,22 @@ def 
test_node_logic_loop(test_nodenet): assert source.get_gate("gen").activation == 0.5 -def test_node_logic_die(test_nodenet): +def test_node_logic_die(runtime, test_nodenet): # without the link, activation ought to drop to 0 - net, netapi, source = prepare(test_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) netapi.unlink(source, "gen", source, "gen") net.step() assert source.get_gate("gen").activation == 0 -def test_node_logic_sum(test_nodenet): +def test_node_logic_sum(runtime, test_nodenet): # propagate positive activation, expect sum - net, netapi, source = prepare(test_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) - reg_a = netapi.create_node("Register", None, "RegA") - reg_b = netapi.create_node("Register", None, "RegB") - reg_result = netapi.create_node("Register", None, "RegResult") + reg_a = netapi.create_node("Neuron", None, "RegA") + reg_b = netapi.create_node("Neuron", None, "RegB") + reg_result = netapi.create_node("Neuron", None, "RegResult") netapi.link(source, "gen", reg_a, "gen", 0.5) netapi.link(source, "gen", reg_b, "gen", 0.5) @@ -57,14 +55,13 @@ def test_node_logic_sum(test_nodenet): assert reg_result.get_gate("gen").activation == 1 -def test_node_logic_cancel(test_nodenet): +def test_node_logic_cancel(runtime, test_nodenet): # propagate positive and negative activation, expect cancellation - net, netapi, source = prepare(test_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) - reg_a = netapi.create_node("Register", None, "RegA") - reg_b = netapi.create_node("Register", None, "RegB") - reg_b.set_gate_parameter("gen", "threshold", -100) - reg_result = netapi.create_node("Register", None, "RegResult") + reg_a = netapi.create_node("Neuron", None, "RegA") + reg_b = netapi.create_node("Neuron", None, "RegB") + reg_result = netapi.create_node("Neuron", None, "RegResult") netapi.link(source, "gen", reg_a, "gen", 1) netapi.link(source, "gen", reg_b, "gen", -1) @@ -76,15 +73,13 @@ def 
test_node_logic_cancel(test_nodenet): assert reg_result.get_gate("gen").activation == 0 -def test_node_logic_store_and_forward(test_nodenet): +def test_node_logic_store_and_forward(runtime, test_nodenet): # collect activation in one node, go forward only if both dependencies are met - net, netapi, source = prepare(test_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) - reg_a = netapi.create_node("Register", None, "RegA") - reg_b = netapi.create_node("Register", None, "RegB") - reg_b.set_gate_parameter("gen", "threshold", -100) - reg_result = netapi.create_node("Register", None, "RegResult") - reg_b.set_gate_parameter("gen", "threshold", 1) + reg_a = netapi.create_node("Neuron", None, "RegA") + reg_b = netapi.create_node("Neuron", None, "RegB") + reg_result = netapi.create_node("Neuron", None, "RegResult") netapi.link(source, "gen", reg_a, "gen") netapi.link(reg_a, "gen", reg_result, "gen") @@ -97,8 +92,8 @@ def test_node_logic_store_and_forward(test_nodenet): assert reg_result.get_gate("gen").activation == 1 -def test_node_logic_activators(test_nodenet): - net, netapi, source = prepare(test_nodenet) +def test_node_logic_activators(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) activator = netapi.create_node('Activator', None) activator.set_parameter('type', 'sub') activator.activation = 1 @@ -110,64 +105,64 @@ def test_node_logic_activators(test_nodenet): assert testpipe.get_gate("sub").activation == 0 -def test_node_logic_sensor_modulator(test_nodenet, default_world): - net, netapi, source = prepare(test_nodenet) - register = netapi.create_node("Register", None) +def test_node_logic_sensor_modulator(runtime, test_nodenet, default_world): + net, netapi, source = prepare(runtime, test_nodenet) + register = netapi.create_node("Neuron", None) netapi.link_sensor(register, "emo_activation", "gen") - micropsi.step_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) + 
runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) assert round(netapi.get_modulator("emo_activation"), 3) == round(register.activation, 3) -def test_node_logic_sensor_datasource(test_nodenet, default_world): - net, netapi, source = prepare(test_nodenet) - micropsi.set_nodenet_properties(test_nodenet, worldadapter="Default", world_uid=default_world) - register = netapi.create_node("Register", None) +def test_node_logic_sensor_datasource(runtime, test_nodenet, default_world): + net, netapi, source = prepare(runtime, test_nodenet) + runtime.set_nodenet_properties(test_nodenet, worldadapter="Default", world_uid=default_world) + register = netapi.create_node("Neuron", None) netapi.link_sensor(register, "static_on", "gen", weight=0.35) - micropsi.step_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) assert round(register.get_gate("gen").activation, 3) == 0.35 -def test_node_logic_actor_modulator(test_nodenet, default_world): - net, netapi, source = prepare(test_nodenet) - netapi.link_actor(source, "base_porret_decay_factor", weight=0.3, gate="gen") - micropsi.step_nodenet(test_nodenet) +def test_node_logic_actuator_modulator(runtime, test_nodenet, default_world): + net, netapi, source = prepare(runtime, test_nodenet) + netapi.link_actuator(source, "base_porret_decay_factor", weight=0.3, gate="gen") + runtime.step_nodenet(test_nodenet) assert round(netapi.get_modulator("base_porret_decay_factor"), 3) == 0.3 -def test_node_logic_actor_datatarget(test_nodenet, default_world): - net, netapi, source = prepare(test_nodenet) - micropsi.set_nodenet_properties(test_nodenet, worldadapter="Default", world_uid=default_world) - netapi.link_actor(source, "echo", weight=0.5, gate="gen") - register = netapi.create_node("Register", None) - actor = netapi.get_nodes(node_name_prefix="echo")[0] - netapi.link(actor, "gen", register, "gen") - 
micropsi.step_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) +def test_node_logic_actuator_datatarget(runtime, test_nodenet, default_world): + net, netapi, source = prepare(runtime, test_nodenet) + runtime.set_nodenet_properties(test_nodenet, worldadapter="Default", world_uid=default_world) + netapi.link_actuator(source, "echo", weight=0.5, gate="gen") + register = netapi.create_node("Neuron", None) + actuator = netapi.get_nodes(node_name_prefix="echo")[0] + netapi.link(actuator, "gen", register, "gen") + runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) assert round(register.get_gate("gen").activation, 1) == 0.5 -def test_node_logic_sensor_nomodulators(engine, default_world): - result, nnuid = micropsi.new_nodenet("adf", engine, "Default", world_uid=default_world, use_modulators=False) - net, netapi, source = prepare(nnuid) - register = netapi.create_node("Register", None) +def test_node_logic_sensor_nomodulators(runtime, engine, default_world): + result, nnuid = runtime.new_nodenet("adf", engine, "Default", world_uid=default_world, use_modulators=False) + net, netapi, source = prepare(runtime, nnuid) + register = netapi.create_node("Neuron", None) netapi.link_sensor(register, "static_on", "gen", weight=0.4) - micropsi.step_nodenet(nnuid) - micropsi.step_nodenet(nnuid) + runtime.step_nodenet(nnuid) + runtime.step_nodenet(nnuid) assert round(register.get_gate("gen").activation, 1) == 0.4 -def test_node_logic_actor_nomodulators(engine, default_world): - result, nnuid = micropsi.new_nodenet("adf", engine, "Default", world_uid=default_world, use_modulators=False) - net, netapi, source = prepare(nnuid) - netapi.link_actor(source, "echo", weight=0.7, gate="gen") - register = netapi.create_node("Register", None) - actor = netapi.get_nodes(node_name_prefix="echo")[0] - netapi.link(actor, "gen", register, "gen") - micropsi.step_nodenet(nnuid) - 
micropsi.step_nodenet(nnuid) - micropsi.step_nodenet(nnuid) +def test_node_logic_actuator_nomodulators(runtime, engine, default_world): + result, nnuid = runtime.new_nodenet("adf", engine, "Default", world_uid=default_world, use_modulators=False) + net, netapi, source = prepare(runtime, nnuid) + netapi.link_actuator(source, "echo", weight=0.7, gate="gen") + register = netapi.create_node("Neuron", None) + actuator = netapi.get_nodes(node_name_prefix="echo")[0] + netapi.link(actuator, "gen", register, "gen") + runtime.step_nodenet(nnuid) + runtime.step_nodenet(nnuid) + runtime.step_nodenet(nnuid) assert round(register.get_gate("gen").activation, 1) == 0.7 diff --git a/micropsi_core/tests/test_node_lstm_logic.py b/micropsi_core/tests/test_node_lstm_logic.py index 881d003e..d9d991a6 100644 --- a/micropsi_core/tests/test_node_lstm_logic.py +++ b/micropsi_core/tests/test_node_lstm_logic.py @@ -5,7 +5,6 @@ Tests for node activation propagation and gate arithmetic """ -from micropsi_core import runtime as micropsi import math @@ -21,29 +20,28 @@ def g(x): return (4 / (1 + math.exp(-x))) - 2 -def prepare(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def prepare(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi - netapi.delete_node(netapi.get_node("n0006")) - source = netapi.create_node("Register", None, "Source") + source = netapi.create_node("Neuron", None, "Source") netapi.link(source, "gen", source, "gen") source.activation = 1 nodenet.step() return nodenet, netapi, source -def prepare_lstm(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def prepare_lstm(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi lstm = netapi.create_node("LSTM", None, "Test") netapi.link(lstm, "gen", lstm, "gen") return lstm -def test_node_lstm_logic_passthrough(fixed_nodenet): +def test_node_lstm_logic_passthrough(runtime, test_nodenet): # test for an LSTM node with only 
the cell slot connected - net, netapi, source = prepare(fixed_nodenet) - lstm = prepare_lstm(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) + lstm = prepare_lstm(runtime, test_nodenet) x = 1 @@ -70,10 +68,10 @@ def test_node_lstm_logic_passthrough(fixed_nodenet): assert round(lstm.get_gate("por").activation, 4) == round(f(0) * h(s), 4) -def test_node_lstm_logic_active_in_gate(fixed_nodenet): +def test_node_lstm_logic_active_in_gate(runtime, test_nodenet): # test lsm with cell and in connected - net, netapi, source = prepare(fixed_nodenet) - lstm = prepare_lstm(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) + lstm = prepare_lstm(runtime, test_nodenet) x = 1 @@ -102,10 +100,10 @@ def test_node_lstm_logic_active_in_gate(fixed_nodenet): -def test_node_lstm_logic_active_in_out_gates(fixed_nodenet): +def test_node_lstm_logic_active_in_out_gates(runtime, test_nodenet): # test lstm with in and out gates connected - net, netapi, source = prepare(fixed_nodenet) - lstm = prepare_lstm(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) + lstm = prepare_lstm(runtime, test_nodenet) x = 1 @@ -134,10 +132,10 @@ def test_node_lstm_logic_active_in_out_gates(fixed_nodenet): assert round(lstm.get_gate("por").activation, 4) == round(f(1) * h(s), 4) -def test_node_lstm_logic_active_in_out_phi_gates(fixed_nodenet): +def test_node_lstm_logic_active_in_out_phi_gates(runtime, test_nodenet): # test lstm with in, out and forget gates connected - net, netapi, source = prepare(fixed_nodenet) - lstm = prepare_lstm(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) + lstm = prepare_lstm(runtime, test_nodenet) x = 1 @@ -167,11 +165,11 @@ def test_node_lstm_logic_active_in_out_phi_gates(fixed_nodenet): assert round(lstm.get_gate("por").activation, 4) == round(f(1) * h(s), 4) -def test_node_lstm_logic_sampling_activator(fixed_nodenet): +def test_node_lstm_logic_sampling_activator(runtime, test_nodenet): # test for an 
LSTM node that's not supposed to update itself as long as a sampling activator is present, # but inactive - net, netapi, source = prepare(fixed_nodenet) - lstm = prepare_lstm(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) + lstm = prepare_lstm(runtime, test_nodenet) x = 1 diff --git a/micropsi_core/tests/test_node_netapi.py b/micropsi_core/tests/test_node_netapi.py index d7ae7d86..f39138c8 100644 --- a/micropsi_core/tests/test_node_netapi.py +++ b/micropsi_core/tests/test_node_netapi.py @@ -6,32 +6,30 @@ """ import pytest -from micropsi_core import runtime as micropsi -def prepare(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def prepare(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi - source = netapi.create_node("Register", None, "Source") + source = netapi.create_node("Neuron", None, "Source") netapi.link(source, "gen", source, "gen") source.activation = 1 nodenet.step() return nodenet, netapi, source -def test_node_netapi_create_register_node(fixed_nodenet): - # test register node creation - net, netapi, source = prepare(fixed_nodenet) - node = netapi.create_node("Register", None, "TestName") +def test_node_netapi_create_neuron_node(runtime, test_nodenet): + # test neuron node creation + net, netapi, source = prepare(runtime, test_nodenet) + node = netapi.create_node("Neuron", None, "TestName") # basic logic tests assert node is not None root_ns = netapi.get_nodespace(None) assert node.parent_nodespace == root_ns.uid - assert node.type == "Register" + assert node.type == "Neuron" assert node.uid is not None assert len(node.get_gate('gen').get_links()) == 0 - assert len(node.get_gate('gen').activations) == 1 # frontend/persistency-oriented data dictionary test data = node.get_data() @@ -39,14 +37,14 @@ def test_node_netapi_create_register_node(fixed_nodenet): assert data['name'] == node.name assert data['type'] == node.type - node = netapi.create_node("Register", None) + node 
= netapi.create_node("Neuron", None) # TODO: teh weirdness, server-internally, we return uids as names, clients don't see this, confusion ensues # assert data['name'] == node.name -def test_node_netapi_create_pipe_node(fixed_nodenet): +def test_node_netapi_create_pipe_node(runtime, test_nodenet): # test concept node generation - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) node = netapi.create_node("Pipe", None, "TestName") # basic logic tests @@ -55,27 +53,16 @@ def test_node_netapi_create_pipe_node(fixed_nodenet): assert node.type == "Pipe" assert node.uid is not None assert len(node.get_gate('gen').get_links()) == 0 - assert len(node.get_gate('gen').activations) == 1 assert len(node.get_gate('sub').get_links()) == 0 - assert len(node.get_gate('sub').activations) == 1 assert len(node.get_gate('sur').get_links()) == 0 - assert len(node.get_gate('sur').activations) == 1 assert len(node.get_gate('por').get_links()) == 0 - assert len(node.get_gate('por').activations) == 1 assert len(node.get_gate('ret').get_links()) == 0 - assert len(node.get_gate('ret').activations) == 1 assert len(node.get_gate('cat').get_links()) == 0 - assert len(node.get_gate('cat').activations) == 1 assert len(node.get_gate('exp').get_links()) == 0 - assert len(node.get_gate('exp').activations) == 1 # frontend/persistency-oriented data dictionary test data = node.get_data() assert data['uid'] == node.uid - for key in node.get_gate_types(): - assert key not in data['gate_parameters'] - for parameter, value in node.nodetype.gate_defaults[key].items(): - assert node.get_gate(key).get_parameter(parameter) == value assert data['name'] == node.name assert data['type'] == node.type @@ -84,10 +71,20 @@ def test_node_netapi_create_pipe_node(fixed_nodenet): # assert data['name'] == node.name +def test_node_netapi_create_optional_arguments(runtime, test_nodenet): + # test the different optional arguments and defaults + netapi = 
runtime.nodenets[test_nodenet].netapi + pipe = netapi.create_node("Pipe") + assert pipe.parent_nodespace == netapi.get_nodespace(None).uid + # assert pipe.name == pipe.uid + pipe = netapi.create_node("Pipe", wait=3) + assert pipe.get_parameter('wait') == 3 + + @pytest.mark.engine("dict_engine") -def test_node_netapi_create_concept_node(fixed_nodenet): +def test_node_netapi_create_concept_node(runtime, test_nodenet): # test concept node generation - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) node = netapi.create_node("Concept", None, "TestName") # basic logic tests @@ -96,23 +93,14 @@ def test_node_netapi_create_concept_node(fixed_nodenet): assert node.type == "Concept" assert node.uid is not None assert len(node.get_gate('gen').get_links()) == 0 - assert len(node.get_gate('gen').activations) == 1 assert len(node.get_gate('sub').get_links()) == 0 - assert len(node.get_gate('sub').activations) == 1 assert len(node.get_gate('sur').get_links()) == 0 - assert len(node.get_gate('sur').activations) == 1 assert len(node.get_gate('por').get_links()) == 0 - assert len(node.get_gate('por').activations) == 1 assert len(node.get_gate('ret').get_links()) == 0 - assert len(node.get_gate('ret').activations) == 1 assert len(node.get_gate('cat').get_links()) == 0 - assert len(node.get_gate('cat').activations) == 1 assert len(node.get_gate('exp').get_links()) == 0 - assert len(node.get_gate('exp').activations) == 1 assert len(node.get_gate('sym').get_links()) == 0 - assert len(node.get_gate('sym').activations) == 1 assert len(node.get_gate('ref').get_links()) == 0 - assert len(node.get_gate('ref').activations) == 1 # frontend/persistency-oriented data dictionary test data = node.get_data() @@ -125,19 +113,19 @@ def test_node_netapi_create_concept_node(fixed_nodenet): # assert data['name'] == node.name -def test_node_netapi_create_node_in_nodespace(fixed_nodenet): - # test register node in nodespace creation - net, netapi, 
source = prepare(fixed_nodenet) +def test_node_netapi_create_node_in_nodespace(runtime, test_nodenet): + # test neuron node in nodespace creation + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") - node = netapi.create_node("Register", nodespace.uid, "TestName") + node = netapi.create_node("Neuron", nodespace.uid, "TestName") assert node.parent_nodespace == nodespace.uid assert node.get_data()['parent_nodespace'] == nodespace.uid -def test_node_netapi_get_nodespace_one(fixed_nodenet): +def test_node_netapi_get_nodespace_one(runtime, test_nodenet): # test single nodespace querying - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "TestName") queried_nodespace = netapi.get_nodespace(nodespace.uid) @@ -145,9 +133,9 @@ def test_node_netapi_get_nodespace_one(fixed_nodenet): assert queried_nodespace.name == nodespace.name -def test_node_netapi_get_nodespace_multi(fixed_nodenet): +def test_node_netapi_get_nodespace_multi(runtime, test_nodenet): # test nodespace listing - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace1 = netapi.create_nodespace(None, "TestName1") nodespace2 = netapi.create_nodespace(None, "TestName2") nodespace3 = netapi.create_nodespace(nodespace2.uid, "TestName3") @@ -159,10 +147,10 @@ def test_node_netapi_get_nodespace_multi(fixed_nodenet): assert nodespace3.uid not in [x.uid for x in queried_nodespaces] -def test_node_netapi_get_node(fixed_nodenet): - # test register node creation - net, netapi, source = prepare(fixed_nodenet) - node = netapi.create_node("Register", None, "TestName") +def test_node_netapi_get_node(runtime, test_nodenet): + # test neuron node creation + net, netapi, source = prepare(runtime, test_nodenet) + node = netapi.create_node("Neuron", None, "TestName") queried_node = netapi.get_node(node.uid) assert 
queried_node.uid == node.uid @@ -171,22 +159,22 @@ def test_node_netapi_get_node(fixed_nodenet): assert queried_node.type == node.type -def test_node_netapi_get_nodes(fixed_nodenet): +def test_node_netapi_get_nodes(runtime, test_nodenet): # test get_nodes plain - net, netapi, source = prepare(fixed_nodenet) - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") + net, netapi, source = prepare(runtime, test_nodenet) + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") nodes = netapi.get_nodes(netapi.get_nodespace(None).uid) assert node1.uid in [n.uid for n in nodes] assert node2.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_by_name(fixed_nodenet): +def test_node_netapi_get_nodes_by_name(runtime, test_nodenet): # test get_nodes by name - net, netapi, source = prepare(fixed_nodenet) - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") + net, netapi, source = prepare(runtime, test_nodenet) + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") nodes = netapi.get_nodes(netapi.get_nodespace(None).uid, node_name_prefix="TestName") assert len(nodes) == 2 @@ -194,12 +182,12 @@ def test_node_netapi_get_nodes_by_name(fixed_nodenet): assert node2.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_by_nodespace(fixed_nodenet): +def test_node_netapi_get_nodes_by_nodespace(runtime, test_nodenet): # test get_nodes by name and nodespace - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") - node1 = netapi.create_node("Register", nodespace.uid, "TestName1") - node2 = netapi.create_node("Register", nodespace.uid, "TestName2") + node1 = netapi.create_node("Neuron", nodespace.uid, "TestName1") + node2 
= netapi.create_node("Neuron", nodespace.uid, "TestName2") nodes = netapi.get_nodes(nodespace.uid) assert len(nodes) == 2 @@ -207,14 +195,14 @@ def test_node_netapi_get_nodes_by_nodespace(fixed_nodenet): assert node2.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_by_nodetype(fixed_nodenet): +def test_node_netapi_get_nodes_by_nodetype(runtime, test_nodenet): # test get_nodes by name and nodespace - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") node1 = netapi.create_node("Pipe", nodespace.uid, "TestName1") - node2 = netapi.create_node("Register", nodespace.uid, "TestName2") + node2 = netapi.create_node("Neuron", nodespace.uid, "TestName2") - nodes = netapi.get_nodes(nodetype="Register") + nodes = netapi.get_nodes(nodetype="Neuron") assert len(nodes) == 2 uids = [n.uid for n in nodes] assert node1.uid not in uids @@ -222,21 +210,21 @@ def test_node_netapi_get_nodes_by_nodetype(fixed_nodenet): assert source.uid in uids -def test_node_netapi_get_nodes_by_name_and_nodespace(fixed_nodenet): +def test_node_netapi_get_nodes_by_name_and_nodespace(runtime, test_nodenet): # test get_nodes by name and nodespace - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", nodespace.uid, "TestName2") + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", nodespace.uid, "TestName2") nodes = netapi.get_nodes(nodespace.uid, "TestName") assert len(nodes) == 1 assert node2.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_gate_field(fixed_nodenet): +def test_node_netapi_get_nodes_in_gate_field(runtime, test_nodenet): # test get_nodes_in_gate_field - net, netapi, source = prepare(fixed_nodenet) + net, 
netapi, source = prepare(runtime, test_nodenet) node1 = netapi.create_node("Pipe", None, "TestName1") node2 = netapi.create_node("Pipe", None, "TestName2") node3 = netapi.create_node("Pipe", None, "TestName3") @@ -253,9 +241,9 @@ def test_node_netapi_get_nodes_in_gate_field(fixed_nodenet): assert node4.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_gate_field_all_links(fixed_nodenet): +def test_node_netapi_get_nodes_in_gate_field_all_links(runtime, test_nodenet): # test get_nodes_in_gate_field without specifying a gate parameter - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) node1 = netapi.create_node("Pipe", None, "TestName1") node2 = netapi.create_node("Pipe", None, "TestName2") node3 = netapi.create_node("Pipe", None, "TestName3") @@ -271,9 +259,9 @@ def test_node_netapi_get_nodes_in_gate_field_all_links(fixed_nodenet): assert node3.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_gate_field_with_limitations(fixed_nodenet): +def test_node_netapi_get_nodes_in_gate_field_with_limitations(runtime, test_nodenet): # test get_nodes_in_gate_field with limitations: no por links - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) node1 = netapi.create_node("Pipe", None, "TestName1") node2 = netapi.create_node("Pipe", None, "TestName2") node3 = netapi.create_node("Pipe", None, "TestName3") @@ -289,9 +277,9 @@ def test_node_netapi_get_nodes_in_gate_field_with_limitations(fixed_nodenet): assert node4.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_gate_field_with_limitations_and_nodespace(fixed_nodenet): +def test_node_netapi_get_nodes_in_gate_field_with_limitations_and_nodespace(runtime, test_nodenet): # test get_nodes_in_gate_field with limitations: no por links - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, 
"NestedNodespace") node1 = netapi.create_node("Pipe", None, "TestName1") node2 = netapi.create_node("Pipe", None, "TestName2") @@ -306,13 +294,13 @@ def test_node_netapi_get_nodes_in_gate_field_with_limitations_and_nodespace(fixe assert node3.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_slot_field(fixed_nodenet): +def test_node_netapi_get_nodes_in_slot_field(runtime, test_nodenet): # test get_nodes_in_slot_field - net, netapi, source = prepare(fixed_nodenet) - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") - node3 = netapi.create_node("Register", None, "TestName3") - node4 = netapi.create_node("Register", None, "TestName4") + net, netapi, source = prepare(runtime, test_nodenet) + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") + node3 = netapi.create_node("Neuron", None, "TestName3") + node4 = netapi.create_node("Neuron", None, "TestName4") netapi.link(node2, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") @@ -325,10 +313,9 @@ def test_node_netapi_get_nodes_in_slot_field(fixed_nodenet): assert node4.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_slot_field_all_links(fixed_nodenet): +def test_node_netapi_get_nodes_in_slot_field_all_links(runtime, test_nodenet): # test get_nodes_in_slot_field without a gate parameter - net, netapi, source = prepare(fixed_nodenet) - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) node1 = netapi.create_node("Pipe", None, "TestName1") node2 = netapi.create_node("Pipe", None, "TestName2") node3 = netapi.create_node("Pipe", None, "TestName3") @@ -345,14 +332,14 @@ def test_node_netapi_get_nodes_in_slot_field_all_links(fixed_nodenet): assert node4.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_with_nodespace_limitation(fixed_nodenet): +def 
test_node_netapi_get_nodes_with_nodespace_limitation(runtime, test_nodenet): # test get_nodes_feed with nodespace limitation - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") - node3 = netapi.create_node("Register", None, "TestName3") - node4 = netapi.create_node("Register", nodespace.uid, "TestName4") + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") + node3 = netapi.create_node("Neuron", None, "TestName3") + node4 = netapi.create_node("Neuron", nodespace.uid, "TestName4") netapi.link(node2, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") @@ -364,9 +351,9 @@ def test_node_netapi_get_nodes_with_nodespace_limitation(fixed_nodenet): assert node3.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_in_slot_field_with_limitations_and_nodespace(fixed_nodenet): +def test_node_netapi_get_nodes_in_slot_field_with_limitations_and_nodespace(runtime, test_nodenet): # test get_nodes_in_gate_field with limitations: no por links - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") node1 = netapi.create_node("Pipe", None, "TestName1") node2 = netapi.create_node("Pipe", None, "TestName2") @@ -381,14 +368,14 @@ def test_node_netapi_get_nodes_in_slot_field_with_limitations_and_nodespace(fixe assert node3.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_active(fixed_nodenet): +def test_node_netapi_get_nodes_active(runtime, test_nodenet): # test get_nodes_active - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, 
"NestedNodespace") - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") - node3 = netapi.create_node("Register", None, "TestName3") - node4 = netapi.create_node("Register", nodespace.uid, "TestName4") + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") + node3 = netapi.create_node("Neuron", None, "TestName3") + node4 = netapi.create_node("Neuron", nodespace.uid, "TestName4") netapi.link(node2, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") @@ -399,25 +386,25 @@ def test_node_netapi_get_nodes_active(fixed_nodenet): net.step() net.step() - nodes = netapi.get_nodes_active(netapi.get_nodespace(None).uid, "Register", 0.7, "gen") + nodes = netapi.get_nodes_active(netapi.get_nodespace(None).uid, "Neuron", 0.7, "gen") assert len(nodes) == 2 assert node1.uid in [n.uid for n in nodes] assert source.uid in [n.uid for n in nodes] - nodes = netapi.get_nodes_active(netapi.get_nodespace(None).uid, "Register") + nodes = netapi.get_nodes_active(netapi.get_nodespace(None).uid, "Neuron") assert len(nodes) == 2 assert node1.uid in [n.uid for n in nodes] assert source.uid in [n.uid for n in nodes] -def test_node_netapi_get_nodes_active_with_nodespace_limitation(fixed_nodenet): +def test_node_netapi_get_nodes_active_with_nodespace_limitation(runtime, test_nodenet): # test get_nodes_active with nodespace filtering - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") - node3 = netapi.create_node("Register", None, "TestName3") - node4 = netapi.create_node("Register", nodespace.uid, "TestName4") + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = 
netapi.create_node("Neuron", None, "TestName2") + node3 = netapi.create_node("Neuron", None, "TestName3") + node4 = netapi.create_node("Neuron", nodespace.uid, "TestName4") netapi.link(node2, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") @@ -428,17 +415,17 @@ def test_node_netapi_get_nodes_active_with_nodespace_limitation(fixed_nodenet): net.step() net.step() - nodes = netapi.get_nodes_active(nodespace.uid, "Register", 0.4) + nodes = netapi.get_nodes_active(nodespace.uid, "Neuron", 0.4) assert len(nodes) == 1 assert node4.uid in [n.uid for n in nodes] -def test_node_netapi_delete_node(fixed_nodenet): +def test_node_netapi_delete_node(runtime, test_nodenet): # test simple delete node case - net, netapi, source = prepare(fixed_nodenet) - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") - node3 = netapi.create_node("Register", None, "TestName3") + net, netapi, source = prepare(runtime, test_nodenet) + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") + node3 = netapi.create_node("Neuron", None, "TestName3") netapi.link(node2, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") @@ -450,14 +437,14 @@ def test_node_netapi_delete_node(fixed_nodenet): assert len(node2.get_gate("gen").get_links()) == 0 -def test_node_netapi_delete_nodespace(fixed_nodenet): +def test_node_netapi_delete_nodespace(runtime, test_nodenet): # test delete node case deleting a nodespace - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) nodespace = netapi.create_nodespace(None, "NestedNodespace") - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") - node3 = netapi.create_node("Register", None, "TestName3") - node4 = netapi.create_node("Register", 
nodespace.uid, "TestName4") + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") + node3 = netapi.create_node("Neuron", None, "TestName3") + node4 = netapi.create_node("Neuron", nodespace.uid, "TestName4") netapi.link(node2, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") netapi.link(node3, "gen", node1, "gen") @@ -469,11 +456,11 @@ def test_node_netapi_delete_nodespace(fixed_nodenet): netapi.get_node(node4uid) -def test_node_netapi_link(fixed_nodenet): +def test_node_netapi_link(runtime, test_nodenet): # test linking nodes - net, netapi, source = prepare(fixed_nodenet) - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") + net, netapi, source = prepare(runtime, test_nodenet) + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") netapi.link(node2, "gen", node1, "gen") assert len(node2.get_gate("gen").get_links()) == 1 @@ -498,11 +485,11 @@ def test_node_netapi_link(fixed_nodenet): assert link.get_data(complete=True)['source_gate_name'] == 'gen' -def test_node_netapi_link_change_weight(fixed_nodenet): +def test_node_netapi_link_change_weight(runtime, test_nodenet): # test linking nodes, the changing weights - net, netapi, source = prepare(fixed_nodenet) - node1 = netapi.create_node("Register", None, "TestName1") - node2 = netapi.create_node("Register", None, "TestName2") + net, netapi, source = prepare(runtime, test_nodenet) + node1 = netapi.create_node("Neuron", None, "TestName1") + node2 = netapi.create_node("Neuron", None, "TestName2") netapi.link(node2, "gen", node1, "gen") net.step() @@ -528,9 +515,9 @@ def test_node_netapi_link_change_weight(fixed_nodenet): assert link.get_data()['target_slot_name'] == 'gen' -def test_node_netapi_link_with_reciprocal(fixed_nodenet): +def test_node_netapi_link_with_reciprocal(runtime, test_nodenet): # test linking pipe and 
concept nodes with reciprocal links - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -563,9 +550,9 @@ def test_node_netapi_link_with_reciprocal(fixed_nodenet): @pytest.mark.engine("dict_engine") -def test_node_netapi_link_with_reciprocal_and_concepts(fixed_nodenet): +def test_node_netapi_link_with_reciprocal_and_concepts(runtime, test_nodenet): # test linking pipe and concept nodes with reciprocal links - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_d = netapi.create_node("Concept", None, "D") @@ -578,9 +565,9 @@ def test_node_netapi_link_with_reciprocal_and_concepts(fixed_nodenet): assert len(n_head.get_slot("exp").get_links()) == 1 -def test_node_netapi_unlink(fixed_nodenet): +def test_node_netapi_unlink(runtime, test_nodenet): # test completely unlinking a node - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -600,9 +587,9 @@ def test_node_netapi_unlink(fixed_nodenet): assert len(n_d.get_slot('por').get_links()) == 3 -def test_node_netapi_unlink_specific_link(fixed_nodenet): +def test_node_netapi_unlink_specific_link(runtime, test_nodenet): # test removing a specific link - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -622,9 +609,9 @@ def test_node_netapi_unlink_specific_link(fixed_nodenet): assert len(n_d.get_slot('por').get_links()) == 4 -def 
test_node_netapi_unlink_gate(fixed_nodenet): +def test_node_netapi_unlink_gate(runtime, test_nodenet): # test unlinking a gate - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -644,105 +631,59 @@ def test_node_netapi_unlink_gate(fixed_nodenet): assert len(n_d.get_slot('por').get_links()) == 3 -def test_node_netapi_unlink_direction(fixed_nodenet): - # test unlinking a gate - net, netapi, source = prepare(fixed_nodenet) - n_head = netapi.create_node("Pipe", None, "Head") - n_a = netapi.create_node("Pipe", None, "A") - n_b = netapi.create_node("Pipe", None, "B") - n_c = netapi.create_node("Pipe", None, "C") - - netapi.link_with_reciprocal(n_head, n_a, "subsur") - netapi.link_with_reciprocal(n_head, n_b, "subsur") - netapi.link_with_reciprocal(n_head, n_c, "subsur") - - nodes = [n_a, n_b, n_c] - for source in nodes: - for target in nodes: - netapi.link_with_reciprocal(source, target, "porret") - - netapi.unlink_direction(n_b, "por") - - assert len(n_head.get_gate('sub').get_links()) == 3 - assert len(n_head.get_slot('sur').get_links()) == 3 - - assert len(n_a.get_slot('por').get_links()) == 2 - assert len(n_b.get_slot('por').get_links()) == 0 - assert len(n_c.get_slot('por').get_links()) == 2 - - netapi.unlink_direction(n_head, "sub") - - assert len(n_head.get_gate('sub').get_links()) == 0 - assert len(n_head.get_slot('sur').get_links()) == 3 - - assert len(n_a.get_slot('sub').get_links()) == 0 - assert len(n_b.get_slot('sub').get_links()) == 0 - assert len(n_c.get_slot('sub').get_links()) == 0 - - -def test_node_netapi_import_actors(fixed_nodenet, test_world): - # test importing data targets as actors - net, netapi, source = prepare(fixed_nodenet) - micropsi.set_nodenet_properties(fixed_nodenet, world_uid=test_world, worldadapter='Braitenberg') +def 
test_node_netapi_import_actuators(runtime, test_nodenet, default_world): + # test importing data targets as actuators + net, netapi, source = prepare(runtime, test_nodenet) + runtime.set_nodenet_properties(test_nodenet, world_uid=default_world, worldadapter='Default') root_ns = netapi.get_nodespace(None) - netapi.import_actors(root_ns.uid) - actors = netapi.get_nodes(root_ns.uid, nodetype="Actor") - assert len(actors) == 2 - assert set([a.get_parameter('datatarget') for a in actors]) == set(net.worldadapter_instance.datatargets.keys()) + netapi.import_actuators(root_ns.uid) + actuators = netapi.get_nodes(root_ns.uid, nodetype="Actuator") + assert len(actuators) == 1 + assert set([a.get_parameter('datatarget') for a in actuators]) == set(net.worldadapter_instance.datatargets.keys()) # do it again, make sure we can call import multiple times - netapi.import_actors(root_ns.uid) - actors = netapi.get_nodes(root_ns.uid, nodetype="Actor") - assert len(actors) == 2 + netapi.import_actuators(root_ns.uid) + actuators = netapi.get_nodes(root_ns.uid, nodetype="Actuator") + assert len(actuators) == 1 -def test_node_netapi_import_sensors(fixed_nodenet, test_world): +def test_node_netapi_import_sensors(runtime, test_nodenet, default_world): # test importing data sources as sensors - net, netapi, source = prepare(fixed_nodenet) - micropsi.set_nodenet_properties(fixed_nodenet, world_uid=test_world, worldadapter='Braitenberg') + net, netapi, source = prepare(runtime, test_nodenet) + runtime.set_nodenet_properties(test_nodenet, world_uid=default_world, worldadapter='Default') root_ns = netapi.get_nodespace(None) netapi.import_sensors(root_ns.uid) sensors = netapi.get_nodes(root_ns.uid, nodetype="Sensor") - assert len(sensors) == 2 + assert len(sensors) == 3 assert set([s.get_parameter('datasource') for s in sensors]) == set(net.worldadapter_instance.datasources.keys()) # do it again, make sure we can call import multiple times netapi.import_sensors(root_ns.uid) sensors = 
netapi.get_nodes(root_ns.uid, nodetype="Sensor") - assert len(sensors) == 2 - - -def test_set_gate_function(fixed_nodenet): - # test setting a custom gate function - from micropsi_core.nodenet.gatefunctions import sigmoid - net, netapi, source = prepare(fixed_nodenet) - - some_other_node_type = netapi.create_node("Pipe", None) - netapi.unlink(source, "gen") - - net.step() - assert source.get_gate("gen").activation == 0 - - netapi.set_gatefunction(netapi.get_nodespace(None).uid, "Register", "gen", "sigmoid") - - source.set_gate_parameter('gen', 'theta', 1) - - net.step() - - assert round(source.get_gate("gen").activation, 5) == round(sigmoid(0, 0, 1), 5) - assert some_other_node_type.get_gate("gen").activation == 0 - - -def test_autoalign(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - for uid in net.get_node_uids(): - net.get_node(uid).position = [12, 13, 11] + assert len(sensors) == 3 + + +def test_autoalign(runtime, test_nodenet): + net = runtime.nodenets[test_nodenet] + netapi = net.netapi + nodes = [] + # create a tiny script + for i in range(3): + nodes.append(netapi.create_node('Pipe', None, 'p%d' % i)) + nodes[i].position = [i * 10, i * 10, i * 10] + n1, n2, n3 = nodes + netapi.link_with_reciprocal(n1, n2, 'subsur') + netapi.link_with_reciprocal(n1, n3, 'subsur') + netapi.link_with_reciprocal(n2, n3, 'porret') netapi.autoalign_nodespace(netapi.get_nodespace(None).uid) positions = [] - for uid in net.get_node_uids(): - if net.get_node(uid).parent_nodespace == netapi.get_nodespace(None).uid: - positions.extend(net.get_node(uid).position) - assert set(positions) != set([12, 13, 11]) + for n in nodes: + positions.append(netapi.get_node(n.uid).position) + + assert positions[0][0] == positions[1][0] # topnode x-aligned with first porret + assert positions[0][1] != positions[1][1] # but different y + assert positions[1][0] != positions[2][0] # porret-nodes have different x + assert positions[1][1] == positions[2][1] # but same y for uid in 
net.get_node_uids(): net.get_node(uid).position = [12, 13, 11] @@ -753,26 +694,36 @@ def test_autoalign(fixed_nodenet): assert set(positions) == set([12, 13, 11]) -def test_autoalign_updates_last_changed(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - for uid in net.get_node_uids(): - net.get_node(uid).position = [12, 13, 11] +def test_autoalign_updates_last_changed(runtime, test_nodenet): + net = runtime.nodenets[test_nodenet] + netapi = net.netapi + nodes = [] + # create a tiny script + for i in range(3): + nodes.append(netapi.create_node('Pipe', None, 'p%d' % i)) + nodes[i].position = [i * 10, i * 10, i * 10] + n1, n2, n3 = nodes + netapi.link_with_reciprocal(n1, n2, 'subsur') + netapi.link_with_reciprocal(n1, n3, 'subsur') + netapi.link_with_reciprocal(n2, n3, 'porret') net.step() net.step() netapi.autoalign_nodespace(netapi.get_nodespace(None).uid) changes = net.get_nodespace_changes([None], 2) for uid in net.get_node_uids(): - if net.get_node(uid).position != [12, 13, 11]: - assert uid in changes['nodes_dirty'] + assert uid in changes['nodes_dirty'] -def test_copy_nodes(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) +def test_copy_nodes(runtime, test_nodenet): + netapi = runtime.nodenets[test_nodenet].netapi nodespace = netapi.create_nodespace(None, name='copy') - a1 = netapi.get_node('n0001') - a2 = netapi.get_node('n0002') - a1.set_parameter('expecation', 0.6) - a1.set_gate_parameter('gen', 'amplification', 0.27) + a1 = netapi.create_node('Pipe', None, "a1") + a2 = netapi.create_node('Pipe', None, "a2") + a3 = netapi.create_node('Pipe', None, "a3") + netapi.link(a3, 'gen', a1, 'gen') + netapi.link(a1, 'por', a2, 'por') + a1.set_parameter('expectation', 0.6) + a1.set_gate_configuration('gen', 'sigmoid', {'bias': 1.3}) mapping = netapi.copy_nodes([a1, a2], nodespace.uid) assert a1 in mapping assert a2 in mapping @@ -783,39 +734,43 @@ def test_copy_nodes(fixed_nodenet): assert len(mapping[a1].get_slot('gen').get_links()) == 0 # 
incoming link from outside not copied assert mapping[a1].get_gate('por').get_links()[0].target_node.uid == mapping[a2].uid assert a1.clone_parameters() == mapping[a1].clone_parameters() - assert a1.get_gate_parameters() == mapping[a1].get_gate_parameters() + assert mapping[a1].get_gate_configuration() == a1.get_gate_configuration() -def test_group_nodes_by_names(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - sepp1 = netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") +def test_group_nodes_by_names(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, node_name_prefix="sepp") seppen_act = netapi.get_activations(None, "sepp") assert len(seppen_act) == 3 -def test_group_nodes_by_ids(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - ids = ["n0001", "n0002"] +def test_group_nodes_by_ids(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + reg = netapi.create_node("Neuron", None, 'reg') + ids = [source.uid, reg.uid] netapi.group_nodes_by_ids(None, ids, "some") some_act = netapi.get_activations(None, "some") assert len(some_act) == 2 -def test_ungroup_nodes(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - ids = ["n0001", "n0002"] +def test_ungroup_nodes(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + reg = netapi.create_node("Neuron", None, 'reg') + ids = [source.uid, reg.uid] netapi.group_nodes_by_ids(None, ids, "some") netapi.ungroup_nodes(None, "some") + with pytest.raises(ValueError): + netapi.get_activations(None, "some") -def test_get_activations(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - sepp1 = 
netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") +def test_get_activations(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, node_name_prefix="sepp") seppen_act = netapi.get_activations(None, "sepp") assert len(seppen_act) == 3 @@ -832,11 +787,11 @@ def test_get_activations(fixed_nodenet): assert seppen_act[2] == 0 -def test_substitute_activations(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - sepp1 = netapi.create_node("Register", None, "sepp1").uid - sepp2 = netapi.create_node("Register", None, "sepp2").uid - sepp3 = netapi.create_node("Register", None, "sepp3").uid +def test_substitute_activations(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + sepp1 = netapi.create_node("Neuron", None, "sepp1").uid + sepp2 = netapi.create_node("Neuron", None, "sepp2").uid + sepp3 = netapi.create_node("Neuron", None, "sepp3").uid netapi.group_nodes_by_names(None, node_name_prefix="sepp") netapi.link(source, "gen", netapi.get_node(sepp2), "gen") @@ -858,47 +813,51 @@ def test_substitute_activations(fixed_nodenet): assert round(seppen_act[2], 2) == -1 -def test_get_thetas(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - sepp1 = netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") +def test_set_gate_get_gate_config(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, 
node_name_prefix="sepp") - seppen_theta = netapi.get_thetas(None, "sepp") - assert len(seppen_theta) == 3 - assert seppen_theta[0] == 0 - assert seppen_theta[1] == 0 - assert seppen_theta[2] == 0 - - -def test_set_thetas(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - sepp1 = netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") + data = netapi.get_gate_configurations(None, "sepp", 'bias') + assert data['gatefunction'] == 'identity' + assert data['parameter_values'][0] == 0 + assert data['parameter_values'][1] == 0 + assert data['parameter_values'][2] == 0 + + +def test_set_gate_config(runtime, test_nodenet): + from micropsi_core.nodenet.gatefunctions import sigmoid + net, netapi, source = prepare(runtime, test_nodenet) + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, node_name_prefix="sepp") - some_thetas = [1, 2, 3] - netapi.set_thetas(None, "sepp", some_thetas) + netapi.set_gate_configurations(None, "sepp", 'sigmoid', 'bias', [1, 2, 3]) net.step() - seppen_theta = netapi.get_thetas(None, "sepp") - assert round(seppen_theta[0], 2) == 1 - assert round(seppen_theta[1], 2) == 2 - assert round(seppen_theta[2], 2) == 3 + data = netapi.get_gate_configurations(None, "sepp", 'bias') + assert data['gatefunction'] == 'sigmoid' + assert data['parameter_values'][0] == 1 + assert data['parameter_values'][1] == 2 + assert data['parameter_values'][2] == 3 + assert round(sepp1.get_gate('gen').activation, 5) == round(sigmoid(0, 1), 5) + assert round(sepp2.get_gate('gen').activation, 5) == round(sigmoid(0, 2), 5) + assert round(sepp3.get_gate('gen').activation, 5) == round(sigmoid(0, 3), 5) -def test_get_link_weights(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) +def test_get_link_weights(runtime, 
test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) - sepp1 = netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, node_name_prefix="sepp") - hugo1 = netapi.create_node("Register", None, "hugo1") - hugo2 = netapi.create_node("Register", None, "hugo2") + hugo1 = netapi.create_node("Neuron", None, "hugo1") + hugo2 = netapi.create_node("Neuron", None, "hugo2") netapi.group_nodes_by_names(None, node_name_prefix="hugo") netapi.link(sepp2, "gen", hugo1, "gen", 0.4) @@ -921,16 +880,16 @@ def test_get_link_weights(fixed_nodenet): assert value == 0.4 -def test_set_link_weights(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) +def test_set_link_weights(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) - sepp1 = netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, node_name_prefix="sepp") - hugo1 = netapi.create_node("Register", None, "hugo1") - hugo2 = netapi.create_node("Register", None, "hugo2") + hugo1 = netapi.create_node("Neuron", None, "hugo1") + hugo2 = netapi.create_node("Neuron", None, "hugo2") netapi.group_nodes_by_names(None, node_name_prefix="hugo") netapi.link(sepp2, "gen", hugo1, "gen", 0.4) @@ -984,11 +943,11 @@ def test_set_link_weights(fixed_nodenet): assert len(netapi.get_node(sepp2.uid).get_gate('gen').get_links()) == 1 -def test_get_node_ids(fixed_nodenet): - net, netapi, source = prepare(fixed_nodenet) - 
sepp1 = netapi.create_node("Register", None, "sepp1") - sepp2 = netapi.create_node("Register", None, "sepp2") - sepp3 = netapi.create_node("Register", None, "sepp3") +def test_get_node_ids(runtime, test_nodenet): + net, netapi, source = prepare(runtime, test_nodenet) + sepp1 = netapi.create_node("Neuron", None, "sepp1") + sepp2 = netapi.create_node("Neuron", None, "sepp2") + sepp3 = netapi.create_node("Neuron", None, "sepp3") netapi.group_nodes_by_names(None, node_name_prefix="sepp") seppen_ids = netapi.get_node_ids(None, "sepp") assert len(seppen_ids) == 3 @@ -997,8 +956,8 @@ def test_get_node_ids(fixed_nodenet): assert seppen_ids[2] == sepp3.uid -def test_add_gate_monitor(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_add_gate_monitor(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi uid = netapi.add_gate_monitor(node, 'gen', name='sepp', color='#987654') assert nodenet.get_monitor(uid).name == 'sepp' @@ -1007,23 +966,22 @@ def test_add_gate_monitor(test_nodenet, node): @pytest.mark.engine("dict_engine") -def test_add_slot_monitor(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_add_slot_monitor(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi uid = netapi.add_slot_monitor(node, 'gen') assert nodenet.get_monitor(uid).type == 'slot' -def test_add_link_monitor(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_add_link_monitor(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi uid = netapi.add_link_monitor(node, 'gen', node, 'gen', name='sepplink') assert nodenet.get_monitor(uid).name == 'sepplink' - assert nodenet.get_monitor(uid).property == 'weight' -def test_add_modulator_monitor(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_add_modulator_monitor(runtime, test_nodenet): + nodenet = 
runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodenet.step() uid = netapi.add_modulator_monitor('base_age', 'age') @@ -1031,8 +989,8 @@ def test_add_modulator_monitor(test_nodenet): assert nodenet.get_monitor(uid).name == 'age' -def test_add_custom_monitor(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_add_custom_monitor(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi function = "return len(netapi.get_nodes())" uid = netapi.add_custom_monitor(function, 'number_of_nodes', color=None) @@ -1040,30 +998,30 @@ def test_add_custom_monitor(test_nodenet): assert nodenet.get_monitor(uid).function == function -def test_get_monitor(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_get_monitor(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi uid = netapi.add_gate_monitor(node, 'gen') assert nodenet.get_monitor(uid) == netapi.get_monitor(uid) -def test_remove_monitor(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_remove_monitor(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi uid = netapi.add_gate_monitor(node, 'gen') netapi.remove_monitor(uid) assert nodenet.get_monitor(uid) is None -def test_set_dashboard_value(test_nodenet, node): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_set_dashboard_value(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi netapi.set_dashboard_value('foo', 'bar') assert nodenet.dashboard_values['foo'] == 'bar' -def test_decay_porret_links(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_decay_porret_links(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi pipes = [] netapi.set_modulator('base_porret_decay_factor', 0.1) @@ -1074,7 +1032,7 @@ def test_decay_porret_links(test_nodenet): 
netapi.link_with_reciprocal(pipes[i - 1], node, 'porret', weight=0.1 * i) netapi.link_with_reciprocal(pipes[0], pipes[1], 'subsur', weight=0.5) - reg = netapi.create_node("Register", None, "source") + reg = netapi.create_node("Neuron", None, "source") netapi.link(reg, 'gen', pipes[0], 'gen', 0.4) netapi.decay_por_links(None) for i in range(9): @@ -1085,8 +1043,8 @@ def test_decay_porret_links(test_nodenet): assert round(pipes[7].get_gate('ret').get_links()[0].weight, 3) == 0.7 -def test_unlink_gate(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_unlink_gate(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi node = netapi.create_node("Pipe", None) pipe1 = netapi.create_node("Pipe", None) @@ -1096,26 +1054,26 @@ def test_unlink_gate(test_nodenet): netapi.link(node, 'por', pipe1, 'gen') netapi.link(node, 'por', pipe2, 'gen') netapi.link(node, 'por', pipe1, 'sur') - micropsi.save_nodenet(test_nodenet) + runtime.save_nodenet(test_nodenet) netapi.unlink_gate(node, 'por') assert node.get_gate('por').empty assert not node.get_gate('sub').empty - micropsi.revert_nodenet(test_nodenet) - netapi = micropsi.nodenets[test_nodenet].netapi + runtime.revert_nodenet(test_nodenet) + netapi = runtime.nodenets[test_nodenet].netapi node = netapi.get_node(node.uid) netapi.unlink_gate(node, 'por', target_node_uid=pipe1.uid) assert len(node.get_gate('por').get_links()) == 1 assert node.get_gate('por').get_links()[0].target_node.uid == pipe2.uid - micropsi.revert_nodenet(test_nodenet) - netapi = micropsi.nodenets[test_nodenet].netapi + runtime.revert_nodenet(test_nodenet) + netapi = runtime.nodenets[test_nodenet].netapi node = netapi.get_node(node.uid) netapi.unlink_gate(node, 'por', target_slot_name='sur') assert len(node.get_gate('por').get_links()) == 2 # pipe1:gen, pipe2:gen assert len(node.get_gate('sub').get_links()) == 2 # only por->sub unlinked -def test_unlink_slot(test_nodenet): - nodenet = 
micropsi.get_nodenet(test_nodenet) +def test_unlink_slot(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi node = netapi.create_node("Pipe", None) pipe1 = netapi.create_node("Pipe", None) @@ -1125,28 +1083,72 @@ def test_unlink_slot(test_nodenet): netapi.link(pipe1, 'gen', node, 'por') netapi.link(pipe2, 'gen', node, 'por') netapi.link(pipe1, 'sur', node, 'por') - micropsi.save_nodenet(test_nodenet) + runtime.save_nodenet(test_nodenet) netapi.unlink_slot(node, 'por') assert node.get_slot('por').empty assert not node.get_slot('sur').empty - micropsi.revert_nodenet(test_nodenet) - netapi = micropsi.nodenets[test_nodenet].netapi + runtime.revert_nodenet(test_nodenet) + netapi = runtime.nodenets[test_nodenet].netapi node = netapi.get_node(node.uid) netapi.unlink_slot(node, 'por', source_node_uid=pipe1.uid) assert len(node.get_slot('por').get_links()) == 1 assert node.get_slot('por').get_links()[0].source_node.uid == pipe2.uid - micropsi.revert_nodenet(test_nodenet) - netapi = micropsi.nodenets[test_nodenet].netapi + runtime.revert_nodenet(test_nodenet) + netapi = runtime.nodenets[test_nodenet].netapi node = netapi.get_node(node.uid) netapi.unlink_slot(node, 'por', source_gate_name='sur') assert len(node.get_slot('por').get_links()) == 2 # pipe1:gen, pipe2:gen assert len(node.get_slot('sur').get_links()) == 2 # only sur->por unlinked -def test_nodespace_properties(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_nodespace_properties(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi rootns = netapi.get_nodespace(None) netapi.set_nodespace_properties(None, {'foo': 'bar'}) data = netapi.get_nodespace_properties() assert data[rootns.uid] == {'foo': 'bar'} + + +@pytest.mark.engine("theano_engine") +def test_group_highdim_elements(runtime, test_nodenet, resourcepath): + import numpy as np + import os + with open(os.path.join(resourcepath, 'nodetypes', 'testnode.py'), 
'w') as fp: + fp.write(""" +nodetype_definition = { + "name": "PhatNM", + "slottypes": ["gen", "sub", "sur", "inbound"], + "gatetypes": ["gen", "sub", "sur", "outbound"], + "nodefunction_name": "phatNM", + "symbol": "F", + "dimensionality": { + "gates": { + "outbound": 2 + }, + "slots": { + "inbound": 10 + } + } +} +def phatNM(netapi, node, **_): + pass +""") + + runtime.reload_code() + nodenet = runtime.get_nodenet(test_nodenet) + netapi = nodenet.netapi + node = netapi.create_node("PhatNM", None, 'fatnode') + registers = [] + for i in range(10): + registers.append(netapi.create_node("Neuron", None, 'reg%d' % i)) + netapi.group_nodes_by_names(None, node_name_prefix='reg', gate='gen') + netapi.group_node_slots(node.uid, slot_prefix='inbound', group_name='fat_in') + netapi.set_link_weights(None, 'reg', None, 'fat_in', np.eye(10)) + for i, r in enumerate(registers): + links = r.get_gate('gen').get_links() + assert len(links) == 1 + assert links[0].target_node.uid == node.uid + assert links[0].target_slot.type == 'inbound%d' % i + netapi.group_node_gates(node.uid, 'outbound', group_name='fat_out') + assert np.all(netapi.get_activations(None, 'fat_out') == np.zeros(2)) diff --git a/micropsi_core/tests/test_node_pipe_logic.py b/micropsi_core/tests/test_node_pipe_logic.py index 2789b96c..51e84f55 100644 --- a/micropsi_core/tests/test_node_pipe_logic.py +++ b/micropsi_core/tests/test_node_pipe_logic.py @@ -5,22 +5,19 @@ Tests for node activation propagation and gate arithmetic """ -from micropsi_core import runtime as micropsi - -def prepare(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def prepare(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi - netapi.delete_node(netapi.get_node("n0006")) - source = netapi.create_node("Register", None, "Source") + source = netapi.create_node("Neuron", None, "Source") netapi.link(source, "gen", source, "gen") source.activation = 1 nodenet.step() return nodenet, netapi, 
source -def add_directional_activators(fixed_nodenet): - net = micropsi.get_nodenet(fixed_nodenet) +def add_directional_activators(runtime, test_nodenet): + net = runtime.get_nodenet(test_nodenet) netapi = net.netapi sub_act = netapi.create_node("Activator", None, "sub-activator") net.get_node(sub_act.uid).set_parameter("type", "sub") @@ -43,9 +40,9 @@ def add_directional_activators(fixed_nodenet): return sub_act, sur_act, por_act, ret_act, cat_act, exp_act -def test_node_pipe_logic_subtrigger(fixed_nodenet): +def test_node_pipe_logic_subtrigger(runtime, test_nodenet): # test a resting classifier, expect sub to be activated - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") netapi.link(source, "gen", n_head, "sub", 1) @@ -54,9 +51,9 @@ def test_node_pipe_logic_subtrigger(fixed_nodenet): assert n_head.get_gate("sub").activation == 1 -def test_node_pipe_logic_classifier_two_off(fixed_nodenet): +def test_node_pipe_logic_classifier_two_off(runtime, test_nodenet): # test a resting classifier, expect no activation - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -68,9 +65,9 @@ def test_node_pipe_logic_classifier_two_off(fixed_nodenet): assert n_head.get_gate("gen").activation == 0 -def test_node_pipe_logic_classifier_two_partial(fixed_nodenet): +def test_node_pipe_logic_classifier_two_partial(runtime, test_nodenet): # test partial success of a classifier (fuzzyness) - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_head.set_parameter("expectation", 0) n_a = netapi.create_node("Pipe", None, "A") @@ -81,6 +78,7 @@ def test_node_pipe_logic_classifier_two_partial(fixed_nodenet): 
netapi.link(n_a, "sur", n_head, "sur", 0.5) netapi.link_with_reciprocal(n_head, n_b, "subsur") netapi.link(n_b, "sur", n_head, "sur", 0.5) + netapi.link(source, "gen", n_head, "sub", 1) netapi.link(source, "gen", n_a, "sur") @@ -95,9 +93,9 @@ def test_node_pipe_logic_classifier_two_partial(fixed_nodenet): assert n_head.get_gate("gen").activation == 1 -def test_node_pipe_logic_classifier_two_partially_failing(fixed_nodenet): +def test_node_pipe_logic_classifier_two_partially_failing(runtime, test_nodenet): # test fuzzyness with one node failing - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -105,6 +103,7 @@ def test_node_pipe_logic_classifier_two_partially_failing(fixed_nodenet): netapi.link(n_a, "sur", n_head, "sur", 0.5) netapi.link_with_reciprocal(n_head, n_b, "subsur") netapi.link(n_b, "sur", n_head, "sur", 0.5) + netapi.link(source, "gen", n_head, "sub", 1) netapi.link(source, "gen", n_a, "sur", -1) @@ -119,9 +118,9 @@ def test_node_pipe_logic_classifier_two_partially_failing(fixed_nodenet): assert n_head.get_gate("gen").activation == 0 -def test_node_pipe_logic_classifier_three_off(fixed_nodenet): +def test_node_pipe_logic_classifier_three_off(runtime, test_nodenet): # test a resting classifier, expect no activation - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") @@ -135,9 +134,9 @@ def test_node_pipe_logic_classifier_three_off(fixed_nodenet): assert n_head.get_gate("gen").activation == 0 -def test_node_pipe_logic_classifier_three_partial(fixed_nodenet): +def test_node_pipe_logic_classifier_three_partial(runtime, test_nodenet): # test partial success of a classifier (fuzzyness) - 
net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_head.set_parameter("expectation", 0) n_a = netapi.create_node("Pipe", None, "A") @@ -156,6 +155,7 @@ def test_node_pipe_logic_classifier_three_partial(fixed_nodenet): netapi.link_with_reciprocal(n_head, n_c, "subsur") netapi.link(n_c, "sur", n_head, "sur", 1/3) + netapi.link(source, "gen", n_head, "sub", 1) netapi.link(source, "gen", n_a, "sur") for i in range(3): @@ -175,9 +175,9 @@ def test_node_pipe_logic_classifier_three_partial(fixed_nodenet): assert round(n_head.get_gate("gen").activation, 2) == 1 -def test_node_pipe_logic_classifier_three_partially_failing(fixed_nodenet): +def test_node_pipe_logic_classifier_three_partially_failing(runtime, test_nodenet): # test fuzzyness with one node failing - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_head.set_parameter("expectation", 0) n_a = netapi.create_node("Pipe", None, "A") @@ -193,6 +193,7 @@ def test_node_pipe_logic_classifier_three_partially_failing(fixed_nodenet): netapi.link_with_reciprocal(n_head, n_c, "subsur") netapi.link(n_c, "sur", n_head, "sur", 1/3) + netapi.link(source, "gen", n_head, "sub", 1) netapi.link(source, "gen", n_a, "sur", -1) for i in range(3): @@ -212,9 +213,9 @@ def test_node_pipe_logic_classifier_three_partially_failing(fixed_nodenet): assert round(n_head.get_gate("gen").activation, 2) == round(1 / 3, 2) -def test_node_pipe_logic_two_script(fixed_nodenet): +def test_node_pipe_logic_two_script(runtime, test_nodenet): # test whether scripts work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_a.set_parameter("wait", 100) @@ -263,9 +264,9 @@ def 
test_node_pipe_logic_two_script(fixed_nodenet): assert round(n_head.get_gate("gen").activation, 2) == 1 -def test_node_pipe_logic_three_script(fixed_nodenet): +def test_node_pipe_logic_three_script(runtime, test_nodenet): # test whether scripts work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_a.set_parameter("wait", 100) @@ -354,9 +355,9 @@ def test_node_pipe_logic_three_script(fixed_nodenet): assert n_head.get_gate("gen").activation == -1 -def test_node_pipe_logic_alternatives(fixed_nodenet): +def test_node_pipe_logic_alternatives(runtime, test_nodenet): # create a script with alternatives, let one fail, one one succeed - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_a = netapi.create_node("Pipe", None, "A") n_a.set_parameter("wait", 100) @@ -465,9 +466,9 @@ def test_node_pipe_logic_alternatives(fixed_nodenet): assert round(n_head.get_gate("gen").activation, 2) == -1 -def test_node_pipe_logic_timeout_fail(fixed_nodenet): +def test_node_pipe_logic_timeout_fail(runtime, test_nodenet): # test whether scripts work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_head = netapi.create_node("Pipe", None, "Head") n_head.set_parameter("wait", 100) n_a = netapi.create_node("Pipe", None, "A") @@ -526,9 +527,26 @@ def test_node_pipe_logic_timeout_fail(fixed_nodenet): assert round(n_head.get_gate("gen").activation, 2) == -1 -#def test_node_pipe_logic_feature_binding(fixed_nodenet): +def test_node_pipe_unrequested_behaviour(runtime, test_nodenet): + # two possible choices for sur-activation if not requested: + # gen mirrors sur, or gen delivers incoming activation + # current decision: gen mirrors sur, no gen-activation w/o being requested + net, netapi, source = 
prepare(runtime, test_nodenet) + pipe = netapi.create_node("Pipe", None, "pipe") + netapi.link(source, 'gen', pipe, 'sur') + net.step() + assert round(pipe.get_gate("gen").activation, 2) == 0 + assert round(pipe.get_gate("por").activation, 2) == 0 + assert round(pipe.get_gate("ret").activation, 2) == 0 + assert round(pipe.get_gate("sub").activation, 2) == 0 + assert round(pipe.get_gate("sur").activation, 2) == 0 + assert round(pipe.get_gate("cat").activation, 2) == 0 + assert round(pipe.get_gate("exp").activation, 2) == 1 + + +#def test_node_pipe_logic_feature_binding(runtime, test_nodenet): # # check if the same feature can be checked and bound twice -# net, netapi, source = prepare(fixed_nodenet) +# net, netapi, source = prepare(runtime, test_nodenet) # schema = netapi.create_node("Pipe", None, "Schema") # element1 = netapi.create_node("Pipe", None, "Element1") # element2 = netapi.create_node("Pipe", None, "Element2") @@ -561,54 +579,54 @@ def test_node_pipe_logic_timeout_fail(fixed_nodenet): # assert schema.get_gate("gen").activation == 1 -def test_node_pipe_logic_search_sub(fixed_nodenet): - # check if sub-searches work - net, netapi, source = prepare(fixed_nodenet) - n_a = netapi.create_node("Pipe", None, "A") - n_b = netapi.create_node("Pipe", None, "B") - netapi.link_with_reciprocal(n_a, n_b, "subsur") - - sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(fixed_nodenet) - netapi.link(source, "gen", sub_act, "gen") - - netapi.link(source, "gen", n_a, "sub") - - net.step() - net.step() - net.step() - - assert round(n_a.get_gate("sub").activation, 2) == 1 - assert round(n_b.get_gate("sub").activation, 2) == 1 - - -def test_node_pipe_logic_search_sur(fixed_nodenet): - # check if sur-searches work - net, netapi, source = prepare(fixed_nodenet) - n_a = netapi.create_node("Pipe", None, "A") - n_b = netapi.create_node("Pipe", None, "B") - netapi.link_with_reciprocal(n_a, n_b, "subsur") - - sub_act, sur_act, por_act, ret_act, cat_act, 
exp_act = add_directional_activators(fixed_nodenet) - netapi.link(source, "gen", sur_act, "gen") - - netapi.link(source, "gen", n_b, "sur") - - net.step() - net.step() - net.step() - - assert n_b.get_gate("sur").activation > 0 - assert n_a.get_gate("sur").activation > 0 +#def test_node_pipe_logic_search_sub(runtime, test_nodenet): +# # check if sub-searches work +# net, netapi, source = prepare(runtime, test_nodenet) +# n_a = netapi.create_node("Pipe", None, "A") +# n_b = netapi.create_node("Pipe", None, "B") +# netapi.link_with_reciprocal(n_a, n_b, "subsur") +# +# sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(runtime, test_nodenet) +# netapi.link(source, "gen", sub_act, "gen") +# +# netapi.link(source, "gen", n_a, "sub") +# +# net.step() +# net.step() +# net.step() +# +# assert round(n_a.get_gate("sub").activation, 2) == 1 +# assert round(n_b.get_gate("sub").activation, 2) == 1 +# +# +#def test_node_pipe_logic_search_sur(runtime, test_nodenet): +# # check if sur-searches work +# net, netapi, source = prepare(runtime, test_nodenet) +# n_a = netapi.create_node("Pipe", None, "A") +# n_b = netapi.create_node("Pipe", None, "B") +# netapi.link_with_reciprocal(n_a, n_b, "subsur") +# +# sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(runtime, test_nodenet) +# netapi.link(source, "gen", sur_act, "gen") +# +# netapi.link(source, "gen", n_b, "sur") +# +# net.step() +# net.step() +# net.step() +# +# assert n_b.get_gate("sur").activation > 0 +# assert n_a.get_gate("sur").activation > 0 -def test_node_pipe_logic_search_por(fixed_nodenet): +def test_node_pipe_logic_search_por(runtime, test_nodenet): # check if por-searches work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") netapi.link_with_reciprocal(n_a, n_b, "porret") - sub_act, sur_act, por_act, ret_act, cat_act, 
exp_act = add_directional_activators(fixed_nodenet) + sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(runtime, test_nodenet) netapi.link(source, "gen", por_act, "gen") netapi.link(source, "gen", n_a, "por") @@ -621,14 +639,14 @@ def test_node_pipe_logic_search_por(fixed_nodenet): assert round(n_b.get_gate("por").activation, 2) == 1 -def test_node_pipe_logic_search_ret(fixed_nodenet): +def test_node_pipe_logic_search_ret(runtime, test_nodenet): # check if ret-searches work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") netapi.link_with_reciprocal(n_a, n_b, "porret") - sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(fixed_nodenet) + sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(runtime, test_nodenet) netapi.link(source, "gen", ret_act, "gen") netapi.link(source, "gen", n_b, "ret") @@ -641,14 +659,14 @@ def test_node_pipe_logic_search_ret(fixed_nodenet): assert round(n_a.get_gate("ret").activation, 2) == 1 -def test_node_pipe_logic_search_cat(fixed_nodenet): +def test_node_pipe_logic_search_cat(runtime, test_nodenet): # check if cat-searches work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") netapi.link_with_reciprocal(n_a, n_b, "catexp") - sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(fixed_nodenet) + sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(runtime, test_nodenet) netapi.link(source, "gen", cat_act, "gen") netapi.link(source, "gen", n_a, "cat") @@ -661,14 +679,14 @@ def test_node_pipe_logic_search_cat(fixed_nodenet): assert round(n_b.get_gate("cat").activation, 2) == 1 -def 
test_node_pipe_logic_search_exp(fixed_nodenet): +def test_node_pipe_logic_search_exp(runtime, test_nodenet): # check if exp-searches work - net, netapi, source = prepare(fixed_nodenet) + net, netapi, source = prepare(runtime, test_nodenet) n_a = netapi.create_node("Pipe", None, "A") n_b = netapi.create_node("Pipe", None, "B") netapi.link_with_reciprocal(n_a, n_b, "catexp") - sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(fixed_nodenet) + sub_act, sur_act, por_act, ret_act, cat_act, exp_act = add_directional_activators(runtime, test_nodenet) netapi.link(source, "gen", exp_act, "gen") netapi.link(source, "gen", n_b, "exp") diff --git a/micropsi_core/tests/test_nodenet_partitions.py b/micropsi_core/tests/test_nodenet_partitions.py index b3fbaecc..bb0ef57b 100644 --- a/micropsi_core/tests/test_nodenet_partitions.py +++ b/micropsi_core/tests/test_nodenet_partitions.py @@ -1,13 +1,12 @@ import pytest -from micropsi_core import runtime as micropsi def prepare(netapi, partition_options={}): partition_options.update({'new_partition': True}) nodespace = netapi.create_nodespace(None, name="partition", options=partition_options) - source = netapi.create_node('Register', None, "Source") - register = netapi.create_node('Register', nodespace.uid, "Register") + source = netapi.create_node('Neuron', None, "Source") + register = netapi.create_node('Neuron', nodespace.uid, "Neuron") netapi.link(source, 'gen', register, 'gen') netapi.link(source, 'gen', source, 'gen') source.activation = 1 @@ -15,16 +14,16 @@ def prepare(netapi, partition_options={}): @pytest.mark.engine("theano_engine") -def test_partition_creation(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_partition_creation(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi netapi.create_nodespace(None, name="partition", options={'new_partition': True}) assert len(nodenet.partitions.keys()) == 2 
@pytest.mark.engine("theano_engine") -def test_cross_partition_links(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_cross_partition_links(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) nodenet.step() @@ -48,22 +47,22 @@ def test_cross_partition_links(test_nodenet): @pytest.mark.engine("theano_engine") -def test_partition_persistence(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_partition_persistence(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) - micropsi.save_nodenet(test_nodenet) - micropsi.revert_nodenet(test_nodenet) + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) nodenet.step() assert register.activation == 1 @pytest.mark.engine("theano_engine") -def test_delete_node_deletes_inlinks(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_delete_node_deletes_inlinks(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) - target = netapi.create_node("Register", None, "target") + target = netapi.create_node("Neuron", None, "target") netapi.link(register, 'gen', target, 'gen') netapi.delete_node(register) links = netapi.get_node(source.uid).get_gate('gen').get_links() @@ -75,13 +74,13 @@ def test_delete_node_deletes_inlinks(test_nodenet): @pytest.mark.engine("theano_engine") -def test_delete_node_modifies_inlinks(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_delete_node_modifies_inlinks(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) - target = netapi.create_node("Register", None, "target") + target = netapi.create_node("Neuron", None, "target") - register2 = 
netapi.create_node("Register", nodespace.uid, "reg2") + register2 = netapi.create_node("Neuron", nodespace.uid, "reg2") netapi.link(register, 'gen', target, 'gen') netapi.link(register2, 'gen', target, 'gen') netapi.link(source, 'gen', register2, 'gen') @@ -97,8 +96,8 @@ def test_delete_node_modifies_inlinks(test_nodenet): @pytest.mark.engine("theano_engine") -def test_grow_partitions(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_grow_partitions(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace = netapi.create_nodespace(None, name="partition", options={ "new_partition": True, @@ -122,15 +121,15 @@ def test_grow_partitions(test_nodenet): assert len(partition.allocated_nodespaces) == 4 # step, save, and load the net to make sure all data structures have been grown properly - micropsi.step_nodenet(test_nodenet) - micropsi.save_nodenet(test_nodenet) - micropsi.revert_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) @pytest.mark.engine("theano_engine") -def test_announce_nodes(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_announce_nodes(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace = netapi.create_nodespace(None, name="partition", options={ "new_partition": True, @@ -158,8 +157,8 @@ def test_announce_nodes(test_nodenet): @pytest.mark.engine("theano_engine") -def test_delete_partition(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_delete_partition(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) netapi.delete_nodespace(nodespace) @@ -169,22 +168,21 @@ def test_delete_partition(test_nodenet): @pytest.mark.engine("theano_engine") -def 
test_delete_partition_unlinks_native_module(test_nodenet, resourcepath): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_delete_partition_unlinks_native_module(runtime, test_nodenet, resourcepath): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') - with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"]}}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - micropsi.reload_native_modules() + with open(os.path.join(resourcepath, 'nodetypes', 'Test', 'Testnode.py'), 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"]} + +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + runtime.reload_code() testnode = netapi.create_node("Testnode", None, "test") netapi.link(testnode, 'foo', register, 'gen') netapi.link(register, 'gen', testnode, 'bar') @@ -194,43 +192,42 @@ def test_delete_partition_unlinks_native_module(test_nodenet, resourcepath): @pytest.mark.engine("theano_engine") -def test_delete_nodespace_unlinks_native_module(test_nodenet, resourcepath): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_delete_nodespace_unlinks_native_module(runtime, test_nodenet, resourcepath): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace = netapi.create_nodespace(None, "foo") foopipe = netapi.create_node("Pipe", nodespace.uid, 'foopipe') import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 
'Test', 'nodefunctions.py') - with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"]}}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - micropsi.reload_native_modules() + with open(os.path.join(resourcepath, 'nodetypes', 'Test', 'foo.py'), 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"] +} +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + runtime.reload_code() testnode = netapi.create_node("Testnode", None, "test") netapi.link(testnode, 'foo', foopipe, 'sub') netapi.link(foopipe, 'sur', testnode, 'bar') - micropsi.save_nodenet(test_nodenet) + runtime.save_nodenet(test_nodenet) # I don't understand why, but this is necessary. 
- micropsi.revert_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) netapi.delete_nodespace(nodespace) data = netapi.get_node(testnode.uid).get_data(include_links=True) assert data['links'] == {} @pytest.mark.engine("theano_engine") -def test_delete_subnodespace_removes_x_partition_links(test_nodenet, resourcepath): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_delete_subnodespace_removes_x_partition_links(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace = netapi.create_nodespace(None, "partition", options={'new_partition': True}) subnodespace = netapi.create_nodespace(nodespace.uid, "foo") - r1 = netapi.create_node("Register", None) - r2 = netapi.create_node("Register", subnodespace.uid) - r3 = netapi.create_node("Register", None) + r1 = netapi.create_node("Neuron", None) + r2 = netapi.create_node("Neuron", subnodespace.uid) + r3 = netapi.create_node("Neuron", None) netapi.link(r1, 'gen', r2, 'gen') netapi.link(r2, 'gen', r3, 'gen') netapi.delete_nodespace(subnodespace) @@ -242,60 +239,165 @@ def test_delete_subnodespace_removes_x_partition_links(test_nodenet, resourcepat @pytest.mark.engine("theano_engine") -def test_sensor_actuator_indices(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +def test_sensor_actuator_indices(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi - result, world_uid = micropsi.new_world('default', 'World') - micropsi.set_nodenet_properties(test_nodenet, worldadapter='Default', world_uid=world_uid) + result, world_uid = runtime.new_world('default', 'DefaultWorld') + runtime.set_nodenet_properties(test_nodenet, worldadapter='Default', world_uid=world_uid) sensor = netapi.create_node("Sensor", None, "static_sensor") sensor.set_parameter("datasource", "static_on") - actor = netapi.create_node("Actor", None, "echo_actor") - actor.set_parameter("datatarget", "echo") - register = netapi.create_node("Register", 
None, "source") + actuator = netapi.create_node("Actuator", None, "echo_actuator") + actuator.set_parameter("datatarget", "echo") + register = netapi.create_node("Neuron", None, "source") register.activation = 0.8 netapi.link(register, 'gen', register, 'gen', weight=0.5) - netapi.link(register, 'gen', actor, 'gen') + netapi.link(register, 'gen', actuator, 'gen') assert sensor.activation == 0 - assert actor.get_gate('gen').activation == 0 - micropsi.step_nodenet(test_nodenet) - micropsi.step_nodenet(test_nodenet) + assert actuator.get_gate('gen').activation == 0 + runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) assert sensor.activation == 1 - assert round(actor.get_gate('gen').activation, 3) == 0.8 + assert round(actuator.get_gate('gen').activation, 3) == 0.8 netapi.delete_node(sensor) - netapi.delete_node(actor) - assert set(nodenet.rootpartition.actuator_indices) == {0} - assert set(nodenet.rootpartition.sensor_indices) == {0} + netapi.delete_node(actuator) + assert set(nodenet.rootpartition.actuator_indices) == {-1} + assert set(nodenet.rootpartition.sensor_indices) == {-1} -def test_partition_get_node_data(test_nodenet): - nodenet = micropsi.get_nodenet(test_nodenet) +@pytest.mark.engine("theano_engine") +def test_partition_get_node_data(runtime, test_nodenet): + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi nodespace, source, register = prepare(netapi) + root_ns = netapi.get_nodespace(None).uid nodes = [] + # 10 nodes, first five in root, other five in new nodespace for i in range(10): n = netapi.create_node("Pipe", nodespace.uid if i > 4 else None, "node %d" % i) nodes.append(n) + # 4 links from root to new nodespace for i in range(4): netapi.link(nodes[i], 'gen', nodes[5], 'gen', weight=((i + 2) / 10)) + + # 1 link back netapi.link(nodes[9], 'gen', nodes[4], 'gen', 0.375) + # 3rd nodespace, with a node linked from root third_ns = netapi.create_nodespace(None, "third") - third = netapi.create_node("Register", 
third_ns.uid, "third") + third = netapi.create_node("Neuron", third_ns.uid, "third") netapi.link(nodes[4], 'gen', third, 'gen') + n1, n3, n4, n5, n9 = nodes[1], nodes[3], nodes[4], nodes[5], nodes[9] + + # assert outlinks/inlinks in get_node + _, data = runtime.get_node(test_nodenet, n1.uid) + assert data['outlinks'] == 1 + assert data['inlinks'] == 0 + _, data = runtime.get_node(test_nodenet, n4.uid) + assert data['outlinks'] == 1 + assert data['inlinks'] == 1 + node_data = nodenet.get_nodes(nodespace_uids=[None])['nodes'] - assert set(node_data.keys()) == set([n.uid for n in nodes[:5]] + [source.uid, register.uid, third.uid] + [nodes[9].uid, nodes[5].uid]) + assert set(node_data.keys()) == set([n.uid for n in nodes[:5]] + [source.uid]) + assert node_data[n1.uid]['outlinks'] == 1 + assert node_data[n4.uid]['outlinks'] == 1 + assert node_data[n4.uid]['links'] == {} node_data = nodenet.get_nodes()['nodes'] - n1, n3, n4, n9 = nodes[1], nodes[3], nodes[4], nodes[9] assert round(node_data[n1.uid]['links']['gen'][0]['weight'], 3) == 0.3 assert round(node_data[n3.uid]['links']['gen'][0]['weight'], 3) == 0.5 assert round(node_data[n9.uid]['links']['gen'][0]['weight'], 3) == 0.375 - # assert node_data[n4.uid]['links'] == {} node_data = nodenet.get_nodes(nodespace_uids=[nodespace.uid])['nodes'] - assert len(node_data.keys()) == 12 - assert node_data[n4.uid]['links'] == {} + assert len(node_data.keys()) == 6 assert third.uid not in node_data + assert node_data[n5.uid]['inlinks'] == 4 + assert node_data[n5.uid]['links'] == {} + assert node_data[n9.uid]['outlinks'] == 1 + assert node_data[n9.uid]['links'] == {} + + data = nodenet.get_nodes(nodespace_uids=[nodespace.uid], links_to_nodespaces=[root_ns]) + assert 'links' in data + source_uids = [l['source_node_uid'] for l in data['links']] + # source->register + our 4 links: + assert set(source_uids) == set(['n0001', 'n0002', 'n0003', 'n0004', 'n0005']) + assert data['nodes'][n9.uid]['links']['gen'][0]['target_node_uid'] == 
n4.uid + + +@pytest.mark.engine("theano_engine") +def test_get_links_for_nodes_partitions(runtime, test_nodenet): + + def linkid(linkdict): + return "%s:%s:%s:%s" % (linkdict['source_node_uid'], linkdict['source_gate_name'], linkdict['target_slot_name'], linkdict['target_node_uid']) + + nodenet = runtime.get_nodenet(test_nodenet) + netapi = nodenet.netapi + nodespace, source, register = prepare(netapi) + + root_ns = netapi.get_nodespace(None).uid + p0 = netapi.create_node("Pipe", root_ns, "rootpipe") + p1 = netapi.create_node("Pipe", nodespace.uid, "partitionpipe") + netapi.link_with_reciprocal(p0, p1, 'catexp') + + links, nodes = nodenet.get_links_for_nodes([p1.uid]) + assert p0.uid in nodes + assert len(links) == 2 + assert set([linkid(l) for l in links]) == set(["%s:%s:%s:%s" % (p0.uid, 'cat', 'cat', p1.uid), "%s:%s:%s:%s" % (p1.uid, 'exp', 'exp', p0.uid)]) + + +def prepare_linkweight_tests(netapi): + nodespace = netapi.create_nodespace(None, name="partition", options={'new_partition': True}) + rootpipes = [] + partitionpipes = [] + for i in range(3): + rootpipes.append(netapi.create_node("Pipe", None, "rootpipe%d" % i)) + for i in range(5): + partitionpipes.append(netapi.create_node("Pipe", nodespace.uid, "partitionpipe%d" % i)) + netapi.group_nodes_by_names(None, "rootpipe", sortby="name") + netapi.group_nodes_by_names(nodespace.uid, "partitionpipe", sortby="name") + return nodespace, rootpipes, partitionpipes + + +@pytest.mark.engine("theano_engine") +def test_set_link_weights_across_unlinked_partitions(runtime, test_nodenet): + # first case: unlinked partitions: + import numpy as np + nodenet = runtime.get_nodenet(test_nodenet) + netapi = nodenet.netapi + nodespace, rootpipes, partitionpipes = prepare_linkweight_tests(netapi) + weights = netapi.get_link_weights(None, "rootpipe", nodespace.uid, "partitionpipe") + assert np.all(weights == np.zeros((5, 3))) + weights[0, 0] = 0.3 + weights[1, 1] = 0.5 + netapi.set_link_weights(None, "rootpipe", nodespace.uid, 
"partitionpipe", weights) + data = nodenet.get_nodes() + l0 = data['nodes'][rootpipes[0].uid]['links']['gen'][0] + l1 = data['nodes'][rootpipes[1].uid]['links']['gen'][0] + assert round(l0['weight'], 3) == 0.3 + assert l0['target_node_uid'] == partitionpipes[0].uid + assert round(l1['weight'], 3) == 0.5 + assert l1['target_node_uid'] == partitionpipes[1].uid + + +@pytest.mark.engine("theano_engine") +def test_set_link_weights_across_already_linked_partitions(runtime, test_nodenet): + # second case: already linked partitions: + import numpy as np + nodenet = runtime.get_nodenet(test_nodenet) + netapi = nodenet.netapi + nodespace, rootpipes, partitionpipes = prepare_linkweight_tests(netapi) + netapi.link_with_reciprocal(rootpipes[0], partitionpipes[0], 'subsur') + weights = netapi.get_link_weights(None, "rootpipe", nodespace.uid, "partitionpipe") + assert np.all(weights == np.zeros((5, 3))) + weights[0, 0] = 0.3 + weights[1, 1] = 0.5 + netapi.set_link_weights(None, "rootpipe", nodespace.uid, "partitionpipe", weights) + data = nodenet.get_nodes() + l0 = data['nodes'][rootpipes[0].uid]['links']['gen'][0] + l1 = data['nodes'][rootpipes[1].uid]['links']['gen'][0] + assert round(l0['weight'], 3) == 0.3 + assert l0['target_node_uid'] == partitionpipes[0].uid + assert round(l1['weight'], 3) == 0.5 + assert l1['target_node_uid'] == partitionpipes[1].uid diff --git a/micropsi_core/tests/test_operations.py b/micropsi_core/tests/test_operations.py index b9cef870..2944bdf5 100644 --- a/micropsi_core/tests/test_operations.py +++ b/micropsi_core/tests/test_operations.py @@ -1,11 +1,11 @@ - +import pytest from micropsi_core import runtime def test_user_operation(test_nodenet, resourcepath): import os - os.makedirs(os.path.join(resourcepath, 'foobar')) - with open(os.path.join(resourcepath, 'foobar', 'operations.py'), 'w+') as fp: + os.makedirs(os.path.join(resourcepath, 'operations', 'foobar')) + with open(os.path.join(resourcepath, 'operations', 'foobar', 'somoperation.py'), 'w+') 
as fp: fp.write(""" def delete_nodes(netapi, selection): for uid in selection: @@ -16,19 +16,24 @@ def delete_nodes(netapi, selection): 'mincount': 1, 'maxcount': -1 }""") - runtime.reload_native_modules() + runtime.reload_code() ops = runtime.get_available_operations() assert ops['delete_nodes']['category'] == 'foobar' - res, uid = runtime.add_node(test_nodenet, "Register", [10, 10], None) + res, uid = runtime.add_node(test_nodenet, "Neuron", [10, 10], None) runtime.run_operation(test_nodenet, "delete_nodes", {}, [uid]) assert uid not in runtime.nodenets[test_nodenet].get_node_uids() def test_autoalign_operation(test_nodenet): ops = runtime.get_available_operations() - assert ops['autoalign']['selection']['nodetypes'] == [] - assert ops['autoalign']['selection']['mincount'] == 1 - assert ops['autoalign']['selection']['maxcount'] == -1 + for selectioninfo in ops['autoalign']['selection']: + if selectioninfo['nodetypes'] == ['Nodespace']: + assert selectioninfo['mincount'] == 1 + assert selectioninfo['maxcount'] == -1 + else: + assert selectioninfo['mincount'] == 2 + assert selectioninfo['maxcount'] == -1 + assert selectioninfo['nodetypes'] == [] assert ops['autoalign']['category'] == 'layout' assert ops['autoalign']['parameters'] == [] @@ -51,4 +56,63 @@ def test_autoalign_operation(test_nodenet): assert p1.position[1] < p2.position[1] assert p2.position[1] == p3.position[1] result, data = runtime.run_operation(test_nodenet, "autoalign", {}, [p1.uid]) - assert 'error' in data \ No newline at end of file + assert 'error' in data + + +@pytest.mark.engine("theano_engine") +def test_add_gate_activation_recorder_operation(test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodes = [] + for i in range(3): + nodes.append(netapi.create_node("Neuron", None, "node%d" % i)) + res, data = runtime.run_operation(test_nodenet, 'add_gate_activation_recorder', { + 'gate': 'gen', + 'interval': 1, + 'name': 'gate_activation_recorder', + }, [n.uid for 
n in nodes]) + runtime.step_nodenet(test_nodenet) + runtime.get_recorder(test_nodenet, data['uid']).values['activations'].shape == (3) + + +@pytest.mark.engine("theano_engine") +def test_add_node_activation_recorder_operation(test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodes = [] + for i in range(3): + nodes.append(netapi.create_node("Pipe", None, "node%d" % i)) + res, data = runtime.run_operation(test_nodenet, 'add_node_activation_recorder', { + 'interval': 1, + 'name': 'node_activation_recorder', + }, [n.uid for n in nodes]) + runtime.step_nodenet(test_nodenet) + runtime.get_recorder(test_nodenet, data['uid']).values['activations'].shape == (7, 3) + + +@pytest.mark.engine("theano_engine") +def test_add_linkweight_recorder_operation(test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodes1 = [] + nodes2 = [] + for i in range(3): + n1 = netapi.create_node("Neuron", None, "node1%d" % i) + n2 = netapi.create_node("Neuron", None, "node2%d" % i) + n1.position = [i * 20, 20] + n2.position = [i * 20, 40] + nodes1.append(n1) + nodes2.append(n2) + for i in range(3): + for j in range(3): + netapi.link(nodes1[i], 'gen', nodes2[j], 'gen') + + res, data = runtime.run_operation(test_nodenet, 'add_linkweight_recorder', { + 'direction': 'down', + 'from_gate': 'gen', + 'to_slot': 'gen', + 'interval': 1, + 'name': 'linkweight_recorder' + }, [n.uid for n in nodes1] + [n.uid for n in nodes2]) + runtime.step_nodenet(test_nodenet) + runtime.get_recorder(test_nodenet, data['uid']).values['linkweights'].shape == (3, 3) diff --git a/micropsi_core/tests/test_recorders.py b/micropsi_core/tests/test_recorders.py new file mode 100644 index 00000000..ab9415fa --- /dev/null +++ b/micropsi_core/tests/test_recorders.py @@ -0,0 +1,164 @@ + +import os +import pytest + + +@pytest.mark.engine("theano_engine") +def test_activation_recorder(runtime, test_nodenet, resourcepath): + nodenet = runtime.nodenets[test_nodenet] + 
netapi = nodenet.netapi + nodespace = netapi.get_nodespace(None) + nodes = [] + for i in range(10): + runtime.step_nodenet(test_nodenet) + node = netapi.create_node('Neuron', None, "testnode_%d" % i) + nodes.append(node) + if i > 0: + netapi.link(nodes[i - 1], 'gen', node, 'gen') + source = netapi.create_node("Neuron", None, "Source") + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', nodes[0], 'gen') + source.activation = 1 + recorder = netapi.add_gate_activation_recorder(group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'testnode'}, name="recorder", interval=2) + assert recorder.name == 'recorder' + assert recorder.interval == 2 + for i in range(5): + runtime.step_nodenet(test_nodenet) + assert recorder.first_step == 12 + assert recorder.current_index == 1 + filename = os.path.join(resourcepath, 'recorder.npz') + recorder.save(filename=filename) + assert os.path.isfile(filename) + assert recorder.values['activations'][1].tolist() == [1, 1, 1, 1, 0, 0, 0, 0, 0, 0] + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + recorder = netapi.get_recorder(recorder.uid) + assert recorder.values['activations'][1].tolist() == [1, 1, 1, 1, 0, 0, 0, 0, 0, 0] + + +@pytest.mark.engine("theano_engine") +def test_nodeactivation_recorder(runtime, test_nodenet): + import numpy as np + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodes = [] + source = netapi.create_node("Neuron", None, 'source') + source.activation = 1 + for i in range(10): + node = netapi.create_node('Pipe', None, "testnode_%d" % i) + netapi.link(source, 'gen', node, 'sub') + nodes.append(node) + + recorder = netapi.add_node_activation_recorder(group_definition={'nodespace_uid': None, 'node_name_prefix': 'testnode'}, name="recorder") + + gatecount = len(nodes[0].get_gate_types()) + runtime.step_nodenet(test_nodenet) + values = recorder.values['activations'][0] + + assert values.shape == (gatecount, 10) + assert np.all(values[5] 
== 1) + assert np.all(values[3] == 1) + assert np.all(values[0] == 0) + + +@pytest.mark.engine("theano_engine") +def test_linkweight_recorder(runtime, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodespace = netapi.get_nodespace(None) + layer1 = [] + layer2 = [] + for i in range(10): + layer1.append(netapi.create_node('Neuron', None, "l1_%d" % i)) + layer2.append(netapi.create_node('Neuron', None, "l2_%d" % i)) + for i in range(10): + for j in range(10): + netapi.link(layer1[i], 'gen', layer2[j], 'gen', weight=0.89) + + recorder = netapi.add_linkweight_recorder( + from_group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'l1'}, + to_group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'l2'}, + name="recorder", interval=1) + + runtime.step_nodenet(test_nodenet) + values = recorder.values + assert set(["%.2f" % item for row in values['linkweights'][0] for item in row]) == {"0.89"} + assert len(values['from_bias'][0]) == 10 + assert len(values['to_bias'][0]) == 10 + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + recorder = netapi.get_recorder(recorder.uid) + assert set(["%.2f" % item for row in recorder.values['linkweights'][0] for item in row]) == {"0.89"} + assert len(values['from_bias'][0]) == 10 + assert len(values['to_bias'][0]) == 10 + + +@pytest.mark.engine("theano_engine") +def test_clear_recorder(runtime, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodespace = netapi.get_nodespace(None) + for i in range(5): + netapi.create_node('Neuron', None, "testnode_%d" % i) + recorder = netapi.add_gate_activation_recorder(group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'testnode'}, name="recorder") + for i in range(3): + runtime.step_nodenet(test_nodenet) + assert len(recorder.values['activations'].tolist()[3]) == 5 + recorder.clear() + assert recorder.values == {} + + +@pytest.mark.engine("theano_engine") 
+def test_remove_recorder(runtime, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodespace = netapi.get_nodespace(None) + for i in range(5): + netapi.create_node('Neuron', None, "testnode_%d" % i) + recorder = netapi.add_gate_activation_recorder(group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'testnode'}, name="recorder") + for i in range(3): + runtime.step_nodenet(test_nodenet) + netapi.remove_recorder(recorder.uid) + assert netapi.get_recorder(recorder.uid) is None + + +@pytest.mark.engine("theano_engine") +def test_grow_recorder_values(runtime, test_nodenet): + from micropsi_core.nodenet.recorder import Recorder + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodespace = netapi.get_nodespace(None) + for i in range(5): + netapi.create_node('Neuron', None, "testnode_%d" % i) + Recorder.initial_size = 5 + recorder = netapi.add_gate_activation_recorder(group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'testnode'}, name="recorder") + runtime.step_nodenet(test_nodenet) + assert len(recorder.values['activations']) == 5 + for i in range(20): + runtime.step_nodenet(test_nodenet) + assert len(recorder.values['activations'] == 25) + + +@pytest.mark.engine("theano_engine") +def test_export_recorders(runtime, test_nodenet): + from micropsi_core.nodenet.recorder import Recorder + import numpy as np + from io import BytesIO + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + for i in range(4): + runtime.step_nodenet(test_nodenet) + nodespace = netapi.get_nodespace(None) + for i in range(5): + netapi.create_node('Neuron', None, "testnode_%d" % i) + Recorder.initial_size = 5 + recorder = netapi.add_gate_activation_recorder(group_definition={'nodespace_uid': nodespace.uid, 'node_name_prefix': 'testnode'}, interval=2, name="recorder") + runtime.step_nodenet(test_nodenet) + runtime.step_nodenet(test_nodenet) + data = runtime.export_recorders(test_nodenet, 
[recorder.uid]) + stream = BytesIO(data) + loaded = np.load(stream) + assert 'recorder_activations' in loaded + assert 'recorder_meta' in loaded + assert np.all(loaded['recorder_meta'] == [6, 2]) + assert loaded['recorder_activations'][0][0] == 0 diff --git a/micropsi_core/tests/test_runtime.py b/micropsi_core/tests/test_runtime.py index ac7b216b..bc256113 100644 --- a/micropsi_core/tests/test_runtime.py +++ b/micropsi_core/tests/test_runtime.py @@ -6,31 +6,31 @@ """ -from micropsi_core import runtime as micropsi import logging +import pytest -def test_set_logging_level(): +def test_set_logging_level(runtime): assert logging.getLogger('system').getEffectiveLevel() == logging.WARNING - micropsi.set_logging_levels({'system': 'DEBUG', 'world': 'DEBUG', 'agent': 'DEBUG'}) + runtime.set_logging_levels({'system': 'DEBUG', 'world': 'DEBUG', 'agent': 'DEBUG'}) assert logging.getLogger('system').getEffectiveLevel() == logging.DEBUG assert logging.getLogger('world').getEffectiveLevel() == logging.DEBUG - assert micropsi.cfg['logging']['level_agent'] == 'DEBUG' + assert runtime.runtime_config['logging']['level_agent'] == 'DEBUG' -def test_get_logging_levels(): +def test_get_logging_levels(runtime): logging.getLogger('system').setLevel(logging.INFO) logging.getLogger('world').setLevel(logging.WARNING) - res = micropsi.get_logging_levels() + res = runtime.get_logging_levels() assert res['system'] == 'INFO' assert res['world'] == 'WARNING' -def test_get_logger_messages(): +def test_get_logger_messages(runtime): msg = "Attention passengers. The next redline train to braintree is now arriving!" 
- micropsi.set_logging_levels({'system': 'INFO'}) + runtime.set_logging_levels({'system': 'INFO'}) logging.getLogger('system').info(msg) - res = micropsi.get_logger_messages('system') + res = runtime.get_logger_messages('system') item = res['logs'][-1] assert item['msg'] assert item['logger'] == 'system' @@ -39,96 +39,96 @@ def test_get_logger_messages(): assert item['step'] is None -def test_nodenet_specific_loggers(): - res, uid1 = micropsi.new_nodenet("test1") - res, uid2 = micropsi.new_nodenet("test2") +def test_nodenet_specific_loggers(runtime): + res, uid1 = runtime.new_nodenet("test1") + res, uid2 = runtime.new_nodenet("test2") assert "agent.%s" % uid1 in logging.Logger.manager.loggerDict assert "agent.%s" % uid2 in logging.Logger.manager.loggerDict logging.getLogger("agent.%s" % uid1).warning("hello!") - res = micropsi.get_logger_messages("agent.%s" % uid1) + res = runtime.get_logger_messages("agent.%s" % uid1) item = res['logs'][-1] assert item['msg'] == "hello!" assert item['step'] == 0 -def test_single_agent_mode(): - mode = micropsi.cfg['micropsi2'].get('single_agent_mode') - micropsi.cfg['micropsi2'].update({'single_agent_mode': '1'}) - res, uid1 = micropsi.new_nodenet("test1") - res, uid2 = micropsi.new_nodenet("test2") - assert uid1 not in micropsi.nodenets - micropsi.cfg['micropsi2'].update({'single_agent_mode': mode}) +def test_single_agent_mode(runtime): + mode = runtime.runtime_config['micropsi2'].get('single_agent_mode') + runtime.runtime_config['micropsi2'].update({'single_agent_mode': '1'}) + res, uid1 = runtime.new_nodenet("test1") + res, uid2 = runtime.new_nodenet("test2") + assert uid1 not in runtime.nodenets + runtime.runtime_config['micropsi2'].update({'single_agent_mode': mode}) -def test_unregister_logger(): - res, uid1 = micropsi.new_nodenet("test1") +def test_unregister_logger(runtime): + res, uid1 = runtime.new_nodenet("test1") logging.getLogger("agent.%s" % uid1).warning('hello!') - micropsi.delete_nodenet(uid1) - assert "agent.%s" 
% uid1 not in micropsi.logger.loggers - assert "agent.%s" % uid1 not in micropsi.logger.record_storage - assert "agent.%s" % uid1 not in micropsi.logger.handlers + runtime.delete_nodenet(uid1) + assert "agent.%s" % uid1 not in runtime.logger.loggers + assert "agent.%s" % uid1 not in runtime.logger.record_storage + assert "agent.%s" % uid1 not in runtime.logger.handlers -def test_get_multiple_logger_messages_are_sorted(): +def test_get_multiple_logger_messages_are_sorted(runtime): from time import sleep logging.getLogger('world').warning('First.') sleep(0.01) logging.getLogger('system').warning('Second') sleep(0.01) logging.getLogger('world').warning('Wat?') - res = micropsi.get_logger_messages(['system', 'world']) + res = runtime.get_logger_messages(['system', 'world']) assert len(res['logs']) == 3 assert res['logs'][0]['logger'] == 'world' assert res['logs'][1]['logger'] == 'system' assert res['logs'][2]['logger'] == 'world' -def test_register_runner_condition_step(test_nodenet): +def test_register_runner_condition_step(runtime, test_nodenet): import time - success, data = micropsi.set_runner_condition(test_nodenet, steps=7) + success, data = runtime.set_runner_condition(test_nodenet, steps=7) assert data['step'] == 7 assert data['step_amount'] == 7 - micropsi.start_nodenetrunner(test_nodenet) - assert micropsi.nodenets[test_nodenet].is_active + runtime.start_nodenetrunner(test_nodenet) + assert runtime.nodenets[test_nodenet].is_active time.sleep(1) - assert micropsi.nodenets[test_nodenet].current_step == 7 - assert not micropsi.nodenets[test_nodenet].is_active + assert runtime.nodenets[test_nodenet].current_step == 7 + assert not runtime.nodenets[test_nodenet].is_active # test that the condition stays active. 
- micropsi.start_nodenetrunner(test_nodenet) - assert micropsi.nodenets[test_nodenet].is_active + runtime.start_nodenetrunner(test_nodenet) + assert runtime.nodenets[test_nodenet].is_active time.sleep(1) - assert micropsi.nodenets[test_nodenet].current_step == 14 - assert not micropsi.nodenets[test_nodenet].is_active + assert runtime.nodenets[test_nodenet].current_step == 14 + assert not runtime.nodenets[test_nodenet].is_active -def test_register_runner_condition_monitor(test_nodenet): +def test_register_runner_condition_monitor(runtime, test_nodenet): import time - nn = micropsi.nodenets[test_nodenet] - node = nn.netapi.create_node('Register', None) + nn = runtime.nodenets[test_nodenet] + node = nn.netapi.create_node('Neuron', None) nn.netapi.link(node, 'gen', node, 'gen', weight=2) node.activation = 0.1 - uid = micropsi.add_gate_monitor(test_nodenet, node.uid, 'gen') - micropsi.set_runner_condition(test_nodenet, monitor={ + uid = runtime.add_gate_monitor(test_nodenet, node.uid, 'gen') + runtime.set_runner_condition(test_nodenet, monitor={ 'uid': uid, 'value': 0.8 }) - micropsi.start_nodenetrunner(test_nodenet) - assert micropsi.nodenets[test_nodenet].is_active + runtime.start_nodenetrunner(test_nodenet) + assert runtime.nodenets[test_nodenet].is_active time.sleep(1) - assert not micropsi.nodenets[test_nodenet].is_active - assert micropsi.nodenets[test_nodenet].current_step == 3 + assert not runtime.nodenets[test_nodenet].is_active + assert runtime.nodenets[test_nodenet].current_step == 3 assert round(nn.get_node(node.uid).get_gate('gen').activation, 4) == 0.8 -def test_runner_condition_persists(test_nodenet): - micropsi.set_runner_condition(test_nodenet, steps=7) - micropsi.save_nodenet(test_nodenet) - micropsi.revert_nodenet(test_nodenet) - assert micropsi.nodenets[test_nodenet].get_runner_condition()['step'] == 7 +def test_runner_condition_persists(runtime, test_nodenet): + runtime.set_runner_condition(test_nodenet, steps=7) + runtime.save_nodenet(test_nodenet) 
+ runtime.revert_nodenet(test_nodenet) + assert runtime.nodenets[test_nodenet].get_runner_condition()['step'] == 7 -def test_get_links_for_nodes(test_nodenet, node): - api = micropsi.nodenets[test_nodenet].netapi +def test_get_links_for_nodes(runtime, test_nodenet, node): + api = runtime.nodenets[test_nodenet].netapi ns = api.create_nodespace(None) node = api.get_node(node) pipe1 = api.create_node("Pipe", ns.uid, "pipe1") @@ -136,7 +136,7 @@ def test_get_links_for_nodes(test_nodenet, node): pipe3 = api.create_node("Pipe", ns.uid, "pipe3") api.link(node, 'gen', pipe1, 'gen') api.link(pipe2, 'sub', node, 'sub') - data = micropsi.get_links_for_nodes(test_nodenet, [node.uid]) + data = runtime.get_links_for_nodes(test_nodenet, [node.uid]) assert len(data['links']) == 3 # node has a genloop assert len(data['nodes'].values()) == 2 assert pipe1.uid in data['nodes'] @@ -144,33 +144,38 @@ def test_get_links_for_nodes(test_nodenet, node): assert pipe3.uid not in data['nodes'] -def test_create_nodenet_from_template(test_nodenet, node, engine): - mode = micropsi.cfg['micropsi2'].get('single_agent_mode') - micropsi.cfg['micropsi2'].update({'single_agent_mode': '1'}) - api = micropsi.nodenets[test_nodenet].netapi +def test_create_nodenet_from_template(runtime, test_nodenet, node, engine): + mode = runtime.runtime_config['micropsi2'].get('single_agent_mode') + runtime.runtime_config['micropsi2'].update({'single_agent_mode': '1'}) + api = runtime.nodenets[test_nodenet].netapi node1 = api.get_node(node) - node2 = api.create_node("Register", None, "node2") + node2 = api.create_node("Neuron", None, "node2") api.link(node1, 'gen', node2, 'gen') - micropsi.save_nodenet(test_nodenet) - result, uid = micropsi.new_nodenet('copynet', engine=engine, template=test_nodenet) - data = micropsi.get_nodes(uid) + runtime.save_nodenet(test_nodenet) + result, uid = runtime.new_nodenet('copynet', engine=engine, template=test_nodenet) + data = runtime.get_nodes(uid) for uid, n in data['nodes'].items(): 
if n['name'] == node1.name: assert len(n['links']['gen']) == 2 else: assert n['name'] == node2.name - micropsi.cfg['micropsi2'].update({'single_agent_mode': mode}) + runtime.runtime_config['micropsi2'].update({'single_agent_mode': mode}) -def test_export_json_does_not_send_duplicate_links(fixed_nodenet): +def test_export_json_does_not_send_duplicate_links(runtime, test_nodenet): import json - result = json.loads(micropsi.export_nodenet(fixed_nodenet)) - assert len(result['links']) == 4 + _, uid1 = runtime.add_node(test_nodenet, "Neuron", [10, 10], None) + _, uid2 = runtime.add_node(test_nodenet, "Neuron", [20, 20], None) + runtime.add_link(test_nodenet, uid1, 'gen', uid2, 'gen') + runtime.add_link(test_nodenet, uid1, 'gen', uid2, 'gen') + runtime.add_link(test_nodenet, uid2, 'gen', uid1, 'gen') + result = json.loads(runtime.export_nodenet(test_nodenet)) + assert len(result['links']) == 2 -def test_generate_netapi_fragment(test_nodenet, resourcepath): +def test_generate_netapi_fragment(runtime, test_nodenet, engine, resourcepath): import os - netapi = micropsi.nodenets[test_nodenet].netapi + netapi = runtime.nodenets[test_nodenet].netapi # create a bunch of nodes and link them linktypes = ['subsur', 'porret', 'catexp'] nodes = [] @@ -179,26 +184,27 @@ def test_generate_netapi_fragment(test_nodenet, resourcepath): p2 = netapi.create_node('Pipe', None, t + '2') nodes.extend([p1, p2]) netapi.link_with_reciprocal(p1, p2, t) - reg = netapi.create_node('Register', None, 'reg') + reg = netapi.create_node('Neuron', None, 'reg') + reg.set_gate_configuration('gen', 'threshold', {'amplification': 2}) netapi.link(reg, 'gen', nodes[0], 'gen') - ns = netapi.create_nodespace(None, 'ns1') - nodes.extend([reg, ns]) + nodes.append(reg) # remember their names names = [n.name for n in nodes] - fragment = micropsi.generate_netapi_fragment(test_nodenet, [n.uid for n in nodes]) - micropsi.nodenets[test_nodenet].clear() + fragment = runtime.generate_netapi_fragment(test_nodenet, [n.uid for 
n in nodes]) + res, pastenet = runtime.new_nodenet('pastnet', engine) code = "def foo(netapi):\n " + "\n ".join(fragment.split('\n')) # save the fragment as recipe & run - with open(os.path.join(resourcepath, 'recipes.py'), 'w+') as fp: + with open(os.path.join(resourcepath, 'recipes', 'test.py'), 'w+') as fp: fp.write(code) - micropsi.reload_native_modules() - micropsi.run_recipe(test_nodenet, 'foo', {}) + runtime.reload_code() + runtime.run_recipe(pastenet, 'foo', {}) + pastnetapi = runtime.get_nodenet(pastenet).netapi # assert that all the nodes are there again - assert set(names) == set([n.name for n in netapi.get_nodes()] + ['ns1']) + assert set(names) == set([n.name for n in pastnetapi.get_nodes()]) -def test_get_nodes(test_nodenet): - nodenet = micropsi.nodenets[test_nodenet] +def test_get_nodes(runtime, engine, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] netapi = nodenet.netapi ns1 = netapi.create_nodespace(None, "ns1") ns2 = netapi.create_nodespace(None, "ns2") @@ -206,53 +212,58 @@ def test_get_nodes(test_nodenet): n1 = netapi.create_node("Pipe", ns1.uid, "n1") n2 = netapi.create_node("Pipe", ns2.uid, "n2") n3 = netapi.create_node("Pipe", ns3.uid, "n3") - result = micropsi.get_nodes(test_nodenet) + result = runtime.get_nodes(test_nodenet) rootuid = nodenet.get_nodespace(None).uid assert set(result['nodes'].keys()) == {n1.uid, n2.uid, n3.uid} assert set(result['nodespaces'].keys()) == {rootuid, ns1.uid, ns2.uid, ns3.uid} - result = micropsi.get_nodes(test_nodenet, [None]) + result = runtime.get_nodes(test_nodenet, [None]) assert result['nodes'] == {} assert set(result['nodespaces'].keys()) == {ns1.uid, ns2.uid} - result = micropsi.get_nodes(test_nodenet, [ns1.uid]) + result = runtime.get_nodes(test_nodenet, [ns1.uid]) assert set(result['nodes'].keys()) == {n1.uid} assert set(result['nodespaces'].keys()) == {ns3.uid} + if engine == "dict_engine": + # test with followupnodes: + netapi.link_with_reciprocal(n1, n2, 'subsur') + result = 
runtime.get_nodes(test_nodenet, [ns1.uid]) + assert n2.uid in result['nodes'] -def test_run_netapi_command(test_nodenet): - nodenet = micropsi.nodenets[test_nodenet] +def test_run_netapi_command(runtime, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] netapi = nodenet.netapi command = "foo = netapi.create_node('Pipe', None, 'foo')" - result, _ = micropsi.run_netapi_command(test_nodenet, command) + result, _ = runtime.run_netapi_command(test_nodenet, command) assert result command = "netapi.link(foo, 'gen', foo, 'gen')" - result, _ = micropsi.run_netapi_command(test_nodenet, command) + result, _ = runtime.run_netapi_command(test_nodenet, command) assert result nodes = netapi.get_nodes() assert len(nodes) == 1 assert nodes[0].get_gate('gen').get_links()[0].target_node == nodes[0] command = "netapi.get_node('%s')" % nodes[0].uid - result, node = micropsi.run_netapi_command(test_nodenet, command) + result, node = runtime.run_netapi_command(test_nodenet, command) assert node == str(nodes[0]) command = "[n.name for n in netapi.get_nodes()]" - result, node = micropsi.run_netapi_command(test_nodenet, command) + result, node = runtime.run_netapi_command(test_nodenet, command) assert node == "['foo']" command = "netapi.create_node()" - result, msg = micropsi.run_netapi_command(test_nodenet, command) + result, msg = runtime.run_netapi_command(test_nodenet, command) assert not result assert msg.startswith("TypeError") - command = "for i in range(3): netapi.create_node('Register', None, 'test%d' % i)" - result, msg = micropsi.run_netapi_command(test_nodenet, command) + command = "for i in range(3): netapi.create_node('Neuron', None, 'test%d' % i)" + result, msg = runtime.run_netapi_command(test_nodenet, command) assert result assert len(netapi.get_nodes()) == 4 -def test_get_netapi_autocomplete(test_nodenet): - micropsi.run_netapi_command(test_nodenet, "foonode = netapi.create_node('Pipe', None, 'foo')") - micropsi.run_netapi_command(test_nodenet, "foogate = 
foonode.get_gate('gen')") - micropsi.run_netapi_command(test_nodenet, "fooslot = foonode.get_slot('gen')") - micropsi.run_netapi_command(test_nodenet, "nodespace = netapi.create_nodespace(None, 'foospace')") - micropsi.run_netapi_command(test_nodenet, "barnode = netapi.create_node('Register', None, 'foo')") - data = micropsi.get_netapi_autocomplete_data(test_nodenet) +def test_get_netapi_autocomplete(runtime, test_nodenet): + runtime.run_netapi_command(test_nodenet, "foonode = netapi.create_node('Pipe', None, 'foo')") + runtime.run_netapi_command(test_nodenet, "foogate = foonode.get_gate('gen')") + runtime.run_netapi_command(test_nodenet, "fooslot = foonode.get_slot('gen')") + runtime.run_netapi_command(test_nodenet, "nodespace = netapi.create_nodespace(None, 'foospace')") + runtime.run_netapi_command(test_nodenet, "barnode = netapi.create_node('Neuron', None, 'foo')") + data = runtime.get_netapi_autocomplete_data(test_nodenet) data['types']['foonode'] = 'Node' data['types']['foogate'] = 'Gate' data['types']['fooslot'] = 'Slot' @@ -263,6 +274,23 @@ def test_get_netapi_autocomplete(test_nodenet): assert data['autocomplete_options']['Slot']["get_links"] == [] assert data['autocomplete_options']['Nodespace']["get_known_ids"][0]['name'] == 'entitytype' assert data['autocomplete_options']['Node']['name'] is None - data = micropsi.get_netapi_autocomplete_data(test_nodenet, name='foonode') + data = runtime.get_netapi_autocomplete_data(test_nodenet, name='foonode') assert list(data['types'].keys()) == ['foonode'] assert list(data['autocomplete_options'].keys()) == ['Node'] + + +def test_get_nodenet_by_name(runtime, test_nodenet): + assert runtime.get_nodenet_uid_by_name("Foobar") is None + assert runtime.get_nodenet_uid_by_name("Testnet") == test_nodenet + + +@pytest.mark.engine("theano_engine") +def test_system_benchmark(runtime, test_nodenet): + from micropsi_core.benchmark_system import benchmark_system + result = benchmark_system(n=10, repeat=1) + assert "numpy 
version" in result + assert "scipy version" in result + assert "theano version" in result + assert "numpy dot" in result + assert "scipy dot" in result + assert "theano dot" in result diff --git a/micropsi_core/tests/test_runtime_monitors.py b/micropsi_core/tests/test_runtime_monitors.py index b8342804..4e9de67a 100644 --- a/micropsi_core/tests/test_runtime_monitors.py +++ b/micropsi_core/tests/test_runtime_monitors.py @@ -5,128 +5,148 @@ Basic tests for monitor api """ import pytest -from micropsi_core import runtime as micropsi -def test_add_gate_monitor(fixed_nodenet): - uid = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen', sheaf='default') - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) - assert monitor.name == 'gate gen @ Node A1' - assert monitor.node_uid == 'n0001' +def prepare(runtime, test_nodenet): + net = runtime.nodenets[test_nodenet] + netapi = net.netapi + source = netapi.create_node("Neuron", None, "source") + register = netapi.create_node("Neuron", None, "reg") + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', register, 'gen') + return net, netapi, source, register + + +def test_add_gate_monitor(runtime, test_nodenet): + net, netapi, source, _ = prepare(runtime, test_nodenet) + uid = runtime.add_gate_monitor(test_nodenet, source.uid, 'gen') + monitor = net.get_monitor(uid) + assert monitor.name == 'gate gen @ Node source' + assert monitor.node_uid == source.uid assert monitor.target == 'gen' assert monitor.type == 'gate' - assert monitor.sheaf == 'default' assert monitor.color.startswith('#') assert len(monitor.values) == 0 - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert len(monitor.values) == 1 @pytest.mark.engine("dict_engine") -def test_add_slot_monitor(fixed_nodenet): - uid = micropsi.add_slot_monitor(fixed_nodenet, 'n0001', 'gen', name="FooBarMonitor", color="#112233") - 
monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) +def test_add_slot_monitor(runtime, test_nodenet): + net, netapi, source, _ = prepare(runtime, test_nodenet) + uid = runtime.add_slot_monitor(test_nodenet, source.uid, 'gen', name="FooBarMonitor", color="#112233") + monitor = net.get_monitor(uid) assert monitor.name == 'FooBarMonitor' - assert monitor.node_uid == 'n0001' + assert monitor.node_uid == source.uid assert monitor.target == 'gen' assert monitor.type == 'slot' assert monitor.color == '#112233' assert len(monitor.values) == 0 - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert len(monitor.values) == 1 -def test_add_link_monitor(fixed_nodenet): - uid = micropsi.add_link_monitor(fixed_nodenet, 'n0005', 'gen', 'n0003', 'gen', 'weight', 'Testmonitor', color="#112233") - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) +def test_add_link_monitor(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + uid = runtime.add_link_monitor(test_nodenet, source.uid, 'gen', register.uid, 'gen', 'Testmonitor', color="#112233") + monitor = net.get_monitor(uid) assert monitor.name == 'Testmonitor' - assert monitor.property == 'weight' - assert monitor.source_node_uid == 'n0005' - assert monitor.target_node_uid == 'n0003' + assert monitor.source_node_uid == source.uid + assert monitor.target_node_uid == register.uid assert monitor.gate_type == 'gen' assert monitor.slot_type == 'gen' assert monitor.color == "#112233" assert len(monitor.values) == 0 - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert round(monitor.values[1], 2) == 1 - micropsi.nodenets[fixed_nodenet].set_link_weight('n0005', 'gen', 'n0003', 'gen', weight=0.7) - micropsi.step_nodenet(fixed_nodenet) - monitor = 
micropsi.nodenets[fixed_nodenet].get_monitor(uid) + net.set_link_weight(source.uid, 'gen', register.uid, 'gen', weight=0.7) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert len(monitor.values) == 2 assert round(monitor.values[2], 2) == 0.7 -def test_add_modulator_monitor(fixed_nodenet): - uid = micropsi.add_modulator_monitor(fixed_nodenet, 'base_test', 'Testmonitor', color="#112233") - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) +def test_add_modulator_monitor(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + uid = runtime.add_modulator_monitor(test_nodenet, 'base_test', 'Testmonitor', color="#112233") + monitor = net.get_monitor(uid) assert monitor.name == 'Testmonitor' assert monitor.modulator == 'base_test' assert monitor.color == "#112233" assert len(monitor.values) == 0 - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert monitor.values[1] == 1 - micropsi.nodenets[fixed_nodenet].set_modulator('base_test', 0.7) - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + net.set_modulator('base_test', 0.7) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert len(monitor.values) == 2 assert monitor.values[2] == 0.7 -def test_add_custom_monitor(fixed_nodenet): +def test_add_custom_monitor(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) code = """return len(netapi.get_nodes())""" - uid = micropsi.add_custom_monitor(fixed_nodenet, code, 'Nodecount', color="#112233") - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + uid = runtime.add_custom_monitor(test_nodenet, code, 'Nodecount', color="#112233") + monitor = net.get_monitor(uid) assert monitor.name == 'Nodecount' assert monitor.compiled_function is not None assert monitor.function == code 
assert monitor.color == "#112233" assert len(monitor.values) == 0 - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) + runtime.step_nodenet(test_nodenet) + monitor = net.get_monitor(uid) assert len(monitor.values) == 1 - assert monitor.values[1] == len(micropsi.nodenets[fixed_nodenet].netapi.get_nodes()) + assert monitor.values[1] == len(net.netapi.get_nodes()) -def test_remove_monitor(fixed_nodenet): - uid = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen') - assert micropsi.nodenets[fixed_nodenet].get_monitor(uid) is not None - micropsi.remove_monitor(fixed_nodenet, uid) - monitor = micropsi.nodenets[fixed_nodenet].get_monitor(uid) +def test_remove_monitor(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + uid = runtime.add_gate_monitor(test_nodenet, source.uid, 'gen') + assert net.get_monitor(uid) is not None + runtime.remove_monitor(test_nodenet, uid) + monitor = net.get_monitor(uid) assert monitor is None -def test_remove_monitored_node(fixed_nodenet): - uid = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen', sheaf='default') - micropsi.delete_nodes(fixed_nodenet, ['n0001']) - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.export_monitor_data(fixed_nodenet) - assert monitor[uid]['values'][1] is None - - -def test_remove_monitored_link(fixed_nodenet): - uid = micropsi.add_link_monitor(fixed_nodenet, 'n0005', 'gen', 'n0003', 'gen', 'weight', 'Testmonitor') - micropsi.delete_link(fixed_nodenet, 'n0005', 'gen', 'n0003', 'gen') - micropsi.step_nodenet(fixed_nodenet) - monitor = micropsi.export_monitor_data(fixed_nodenet) - assert monitor[uid]['values'][1] is None - - -def test_remove_monitored_link_via_delete_node(fixed_nodenet): - uid = micropsi.add_link_monitor(fixed_nodenet, 'n0005', 'gen', 'n0003', 'gen', 'weight', 'Testmonitor') - micropsi.delete_nodes(fixed_nodenet, ['n0005']) - micropsi.step_nodenet(fixed_nodenet) - monitor = 
micropsi.export_monitor_data(fixed_nodenet) - assert monitor[uid]['values'][1] is None - - -def test_get_monitor_data(fixed_nodenet): - uid = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen', name="Testmonitor") - micropsi.step_nodenet(fixed_nodenet) - data = micropsi.get_monitor_data(fixed_nodenet) +def test_remove_monitored_node(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + nodenet = runtime.nodenets[test_nodenet] + uid = runtime.add_gate_monitor(test_nodenet, source.uid, 'gen') + runtime.delete_nodes(test_nodenet, [source.uid]) + runtime.step_nodenet(test_nodenet) + monitor = nodenet.get_monitor(uid) + assert monitor.values[1] is None + + +def test_remove_monitored_link(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + nodenet = runtime.nodenets[test_nodenet] + uid = runtime.add_link_monitor(test_nodenet, source.uid, 'gen', register.uid, 'gen', 'Testmonitor') + runtime.delete_link(test_nodenet, source.uid, 'gen', register.uid, 'gen') + runtime.step_nodenet(test_nodenet) + monitor = nodenet.get_monitor(uid) + assert monitor.values[1] is None + + +def test_remove_monitored_link_via_delete_node(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + nodenet = runtime.nodenets[test_nodenet] + uid = runtime.add_link_monitor(test_nodenet, source.uid, 'gen', register.uid, 'gen', 'Testmonitor') + runtime.delete_nodes(test_nodenet, [register.uid]) + runtime.step_nodenet(test_nodenet) + monitor = nodenet.get_monitor(uid) + assert monitor.values[1] is None + + +def test_get_monitor_data(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + uid = runtime.add_gate_monitor(test_nodenet, source.uid, 'gen', name="Testmonitor") + runtime.step_nodenet(test_nodenet) + data = runtime.get_monitor_data(test_nodenet) assert data['current_step'] == 1 assert data['monitors'][uid]['name'] == 'Testmonitor' values = 
data['monitors'][uid]['values'] @@ -134,68 +154,123 @@ def test_get_monitor_data(fixed_nodenet): assert [k for k in values.keys()] == [1] -def test_export_monitor_data(fixed_nodenet): - uid1 = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen') - uid2 = micropsi.add_gate_monitor(fixed_nodenet, 'n0003', 'gen') - micropsi.step_nodenet(fixed_nodenet) - data = micropsi.export_monitor_data(fixed_nodenet) - assert uid1 in data - assert 'values' in data[uid1] - assert uid2 in data - - -def test_export_monitor_data_with_id(fixed_nodenet): - uid1 = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen', name="Testmonitor") - micropsi.add_gate_monitor(fixed_nodenet, 'n0003', 'gen') - micropsi.step_nodenet(fixed_nodenet) - data = micropsi.export_monitor_data(fixed_nodenet, monitor_uid=uid1) - assert data['name'] == 'Testmonitor' - assert 'values' in data - - -def test_clear_monitor(fixed_nodenet): - uid = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen') - micropsi.step_nodenet(fixed_nodenet) - micropsi.clear_monitor(fixed_nodenet, uid) - data = micropsi.get_monitor_data(fixed_nodenet) +def test_clear_monitor(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + uid = runtime.add_gate_monitor(test_nodenet, source.uid, 'gen') + runtime.step_nodenet(test_nodenet) + runtime.clear_monitor(test_nodenet, uid) + data = runtime.get_monitor_data(test_nodenet) values = data['monitors'][uid]['values'] assert len(values.keys()) == 0 -def test_fetch_partial_monitor_data(fixed_nodenet): - uid = micropsi.add_gate_monitor(fixed_nodenet, 'n0001', 'gen') +def test_get_partial_monitor_data(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + uid = runtime.add_gate_monitor(test_nodenet, source.uid, 'gen') i = 0 while i < 50: - micropsi.step_nodenet(fixed_nodenet) + runtime.step_nodenet(test_nodenet) i += 1 - assert micropsi.nodenets[fixed_nodenet].current_step == 50 + nodenet = runtime.nodenets[test_nodenet] 
+ assert nodenet.current_step == 50 # get 10 items from [20 - 29] - data = micropsi.export_monitor_data(fixed_nodenet, monitor_from=20, monitor_count=10) - values = data[uid]['values'] - assert len(values.keys()) == 10 - assert set(list(values.keys())) == set(range(20, 30)) - - # get 10 items from [20 - 29] for one monitor - data = micropsi.export_monitor_data(fixed_nodenet, monitor_uid=uid, monitor_from=20, monitor_count=10) - values = data['values'] + data = runtime.get_monitor_data(test_nodenet, from_step=20, count=10) + values = data['monitors'][uid]['values'] assert len(values.keys()) == 10 assert set(list(values.keys())) == set(range(20, 30)) # get 10 newest values [41-50] - data = micropsi.export_monitor_data(fixed_nodenet, monitor_count=10) - values = data[uid]['values'] + data = runtime.get_monitor_data(test_nodenet, count=10) + values = data['monitors'][uid]['values'] assert len(values.keys()) == 10 assert set(list(values.keys())) == set(range(41, 51)) # get 10 items, starting at 45 -- assert they are filled up to the left. 
- data = micropsi.export_monitor_data(fixed_nodenet, monitor_from=40, monitor_count=15) - values = data[uid]['values'] + data = runtime.get_monitor_data(test_nodenet, from_step=40, count=15) + values = data['monitors'][uid]['values'] assert len(values.keys()) == 15 assert set(list(values.keys())) == set(range(36, 51)) # get all items, starting at 10 - data = micropsi.export_monitor_data(fixed_nodenet, monitor_from=10) - values = data[uid]['values'] + data = runtime.get_monitor_data(test_nodenet, from_step=10) + values = data['monitors'][uid]['values'] assert len(values.keys()) == 41 assert set(list(values.keys())) == set(range(10, 51)) + + +def test_add_group_monitor(runtime, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + nodespace = netapi.get_nodespace(None) + nodes = [] + for i in range(10): + node = netapi.create_node('Neuron', None, "testnode_%d" % i) + nodes.append(node) + if i > 0: + netapi.link(nodes[i - 1], 'gen', node, 'gen') + source = netapi.create_node("Neuron", None, "Source") + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', nodes[0], 'gen') + source.activation = 1 + monitor_uid = netapi.add_group_monitor(nodespace.uid, 'testndoes', node_name_prefix='testnode', gate='gen', color='purple') + for i in range(5): + runtime.step_nodenet(test_nodenet) + data = nodenet.get_monitor(monitor_uid).get_data() + assert set(data['values'][4][:4]) == {1.0} # first 4 active + assert set(data['values'][4][4:]) == {0.0} # rest off + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + nodenet = runtime.nodenets[test_nodenet] + data2 = nodenet.get_monitor(monitor_uid).get_data() + assert data2 == data + runtime.step_nodenet(test_nodenet) + data3 = nodenet.get_monitor(monitor_uid).get_data() + assert set(data3['values'][6][:6]) == {1.0} # first 6 active + assert set(data3['values'][6][6:]) == {0.0} # rest off + + +def test_adhoc_monitor(runtime, test_nodenet): + nodenet = 
runtime.get_nodenet(test_nodenet) + netapi = nodenet.netapi + var = 13 + + def valuefunc(): + return var + netapi.add_adhoc_monitor(valuefunc, 'test') + runtime.step_nodenet(test_nodenet) + items = list(runtime.get_monitor_data(test_nodenet)['monitors'].items()) + assert len(items) == 1 + uid, data = items[0] + assert uid != data['name'] + assert data['name'] == 'test' + assert data['values'][1] == 13 + + def doublefunc(): + return var * 2 + netapi.add_adhoc_monitor(doublefunc, 'test') + runtime.step_nodenet(test_nodenet) + items = list(runtime.get_monitor_data(test_nodenet)['monitors'].items()) + assert len(items) == 1 + uid, data = items[0] + assert uid != data['name'] + assert data['name'] == 'test' + assert data['values'][1] == 13 + assert data['values'][2] == 26 + + def parameterfunc(foo): + return var * foo + netapi.add_adhoc_monitor(doublefunc, 'test', {'foo': 2}) + runtime.step_nodenet(test_nodenet) + items = list(runtime.get_monitor_data(test_nodenet)['monitors'].items()) + assert len(items) == 1 + uid, data = items[0] + assert uid != data['name'] + assert data['name'] == 'test' + assert data['values'][1] == 13 + assert data['values'][2] == 26 + + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + items = list(runtime.get_monitor_data(test_nodenet)['monitors'].items()) + assert len(items) == 0 diff --git a/micropsi_core/tests/test_runtime_nodenet_basics.py b/micropsi_core/tests/test_runtime_nodenet_basics.py index ac8eee83..3e1bfce4 100644 --- a/micropsi_core/tests/test_runtime_nodenet_basics.py +++ b/micropsi_core/tests/test_runtime_nodenet_basics.py @@ -5,8 +5,6 @@ """ import os -from micropsi_core import runtime -from micropsi_core import runtime as micropsi import mock import pytest @@ -14,228 +12,231 @@ __date__ = '29.10.12' -def test_new_nodenet(test_nodenet, resourcepath, engine): - success, nodenet_uid = micropsi.new_nodenet("Test_Nodenet", engine=engine, worldadapter="Default", owner="tester") +def prepare(runtime, 
test_nodenet): + net = runtime.nodenets[test_nodenet] + netapi = net.netapi + source = netapi.create_node("Neuron", None, "source") + register = netapi.create_node("Neuron", None, "reg") + netapi.link(source, 'gen', source, 'gen') + netapi.link(source, 'gen', register, 'gen') + return net, netapi, source, register + + +def test_new_nodenet(runtime, test_nodenet, default_world, resourcepath, engine): + success, nodenet_uid = runtime.new_nodenet("Test_Nodenet", engine=engine, world_uid=default_world, worldadapter="Default", owner="tester") assert success + runtime.revert_nodenet(nodenet_uid) + nodenet = runtime.get_nodenet(nodenet_uid) + assert nodenet.world == default_world + assert nodenet.worldadapter == "Default" assert nodenet_uid != test_nodenet - assert micropsi.get_available_nodenets("tester")[nodenet_uid].name == "Test_Nodenet" - n_path = os.path.join(resourcepath, runtime.NODENET_DIRECTORY, nodenet_uid + ".json") + assert runtime.get_available_nodenets("tester")[nodenet_uid].name == "Test_Nodenet" + n_path = os.path.join(resourcepath, runtime.NODENET_DIRECTORY, nodenet_uid, "nodenet.json") assert os.path.exists(n_path) # get_available_nodenets - nodenets = micropsi.get_available_nodenets() - mynets = micropsi.get_available_nodenets("tester") + nodenets = runtime.get_available_nodenets() + mynets = runtime.get_available_nodenets("tester") assert test_nodenet in nodenets assert nodenet_uid in nodenets assert nodenet_uid in mynets assert test_nodenet not in mynets # delete_nodenet - micropsi.delete_nodenet(nodenet_uid) - assert nodenet_uid not in micropsi.get_available_nodenets() + runtime.delete_nodenet(nodenet_uid) + assert nodenet_uid not in runtime.get_available_nodenets() assert not os.path.exists(n_path) -def test_nodenet_data_gate_parameters(fixed_nodenet): - from micropsi_core.nodenet.node import Nodetype - data = micropsi.nodenets[fixed_nodenet].get_data() - assert data['nodes']['n0005']['gate_parameters'] == {} - 
micropsi.set_gate_parameters(fixed_nodenet, 'n0005', 'gen', {'threshold': 1}) - data = micropsi.nodenets[fixed_nodenet].get_data() - assert data['nodes']['n0005']['gate_parameters'] == {'gen': {'threshold': 1}} - defaults = Nodetype.GATE_DEFAULTS.copy() - defaults.update({'threshold': 1}) - data = micropsi.nodenets[fixed_nodenet].get_node('n0005').get_data()['gate_parameters'] - assert data == {'gen': {'threshold': 1}} - - -def test_user_prompt(fixed_nodenet, resourcepath): +def test_user_prompt(runtime, test_nodenet, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') + nodenet = runtime.nodenets[test_nodenet] with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "gatetypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "parameters": ["testparam"],\ - "parameter_defaults": {\ - "testparam": 13\ - }\ - }}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - - micropsi.reload_native_modules() - res, uid = micropsi.add_node(fixed_nodenet, "Testnode", [10, 10], name="Test") - nativemodule = micropsi.nodenets[fixed_nodenet].get_node(uid) - - options = [{'key': 'foo_parameter', 'label': 'Please give value for "foo"', 'values': [23, 42]}] - micropsi.nodenets[fixed_nodenet].netapi.ask_user_for_parameter( - nativemodule, - "foobar", - options - ) - result, data = micropsi.get_calculation_state(fixed_nodenet, nodenet={}) + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "parameters": ["testparam"], + "parameter_defaults": { + "testparam": 13 + }, + "user_prompts": { + "promptident": { + "callback": 
"user_prompt_callback", + "parameters": [ + {"name": "foo", "description": "value for foo", "default": 23}, + {"name": "bar", "description": "value for bar", "default": 42} + ] + } + } +} + +def testnodefunc(netapi, node=None, **prams): + if not hasattr(node, 'foo'): + node.foo = 0 + node.bar = 1 + netapi.show_user_prompt(node, "promptident") + node.get_gate("foo").gate_function(node.foo) + node.get_gate("bar").gate_function(node.bar) + +def user_prompt_callback(netapi, node, user_prompt_params): + \"\"\"Elaborate explanation as to what this user prompt is for\"\"\" + node.foo = int(user_prompt_params['foo']) + node.bar = int(user_prompt_params['bar']) +""") + + runtime.reload_code() + res, node_uid = runtime.add_node(test_nodenet, "Testnode", [10, 10], name="Test") + runtime.reload_code() # this breaks, if the nodetype overwrites the definition + nativemodule = nodenet.get_node(node_uid) + runtime.step_nodenet(test_nodenet) + result, data = runtime.get_calculation_state(test_nodenet, nodenet={}) assert 'user_prompt' in data - assert data['user_prompt']['msg'] == 'foobar' - assert data['user_prompt']['node']['uid'] == uid - assert data['user_prompt']['options'] == options + assert data['user_prompt']['key'] == "promptident" + assert data['user_prompt']['msg'] == 'Elaborate explanation as to what this user prompt is for' + assert data['user_prompt']['node']['uid'] == node_uid + assert len(data['user_prompt']['parameters']) == 2 + assert nativemodule.get_gate('foo').activation == 0 + assert nativemodule.get_gate('bar').activation == 1 + # response - micropsi.user_prompt_response(fixed_nodenet, uid, {'foo_parameter': 42}, True) - assert micropsi.nodenets[fixed_nodenet].get_node(uid).get_parameter('foo_parameter') == 42 - assert micropsi.nodenets[fixed_nodenet].is_active - from micropsi_core.nodenet import nodefunctions - tmp = nodefunctions.concept - nodefunc = mock.Mock() - nodefunctions.concept = nodefunc - micropsi.nodenets[fixed_nodenet].step() - foo = 
micropsi.nodenets[fixed_nodenet].get_node('n0001').clone_parameters() - foo.update({'foo_parameter': 42}) - assert nodefunc.called_with(micropsi.nodenets[fixed_nodenet].netapi, micropsi.nodenets[fixed_nodenet].get_node('n0001'), foo) - micropsi.nodenets[fixed_nodenet].get_node('n0001').clear_parameter('foo_parameter') - assert micropsi.nodenets[fixed_nodenet].get_node('n0001').get_parameter('foo_parameter') is None - nodefunctions.concept = tmp - - -def test_user_notification(test_nodenet, node): - api = micropsi.nodenets[test_nodenet].netapi + runtime.user_prompt_response(test_nodenet, node_uid, "promptident", {'foo': '111', 'bar': '222'}, False) + runtime.step_nodenet(test_nodenet) + assert nativemodule.get_gate('foo').activation == 111 + assert nativemodule.get_gate('bar').activation == 222 + + +def test_user_notification(runtime, test_nodenet, node): + api = runtime.nodenets[test_nodenet].netapi node_obj = api.get_node(node) api.notify_user(node_obj, "Hello there") - result, data = micropsi.get_calculation_state(test_nodenet, nodenet={'nodespaces': [None]}) + result, data = runtime.get_calculation_state(test_nodenet, nodenet={'nodespaces': [None]}) assert 'user_prompt' in data assert data['user_prompt']['node']['uid'] == node assert data['user_prompt']['msg'] == "Hello there" -def test_nodespace_removal(fixed_nodenet): - res, uid = micropsi.add_nodespace(fixed_nodenet, [100, 100], nodespace=None, name="testspace") - res, n1_uid = micropsi.add_node(fixed_nodenet, 'Register', [100, 100], nodespace=uid, name="sub1") - res, n2_uid = micropsi.add_node(fixed_nodenet, 'Register', [100, 200], nodespace=uid, name="sub2") - micropsi.add_link(fixed_nodenet, n1_uid, 'gen', n2_uid, 'gen', weight=1, certainty=1) - res, sub_uid = micropsi.add_nodespace(fixed_nodenet, [100, 100], nodespace=uid, name="subsubspace") - micropsi.delete_nodespace(fixed_nodenet, uid) +def test_nodespace_removal(runtime, test_nodenet): + res, uid = runtime.add_nodespace(test_nodenet, nodespace=None, 
name="testspace") + res, n1_uid = runtime.add_node(test_nodenet, 'Neuron', [100, 100], nodespace=uid, name="sub1") + res, n2_uid = runtime.add_node(test_nodenet, 'Neuron', [100, 200], nodespace=uid, name="sub2") + runtime.add_link(test_nodenet, n1_uid, 'gen', n2_uid, 'gen', weight=1) + res, sub_uid = runtime.add_nodespace(test_nodenet, nodespace=uid, name="subsubspace") + runtime.delete_nodespace(test_nodenet, uid) # assert that the nodespace is gone - assert not micropsi.nodenets[fixed_nodenet].is_nodespace(uid) - assert uid not in micropsi.nodenets[fixed_nodenet].get_data()['nodespaces'] + assert not runtime.nodenets[test_nodenet].is_nodespace(uid) + assert uid not in runtime.nodenets[test_nodenet].get_data()['nodespaces'] # assert that the nodes it contained are gone - assert not micropsi.nodenets[fixed_nodenet].is_node(n1_uid) - assert n1_uid not in micropsi.nodenets[fixed_nodenet].get_data()['nodes'] - assert not micropsi.nodenets[fixed_nodenet].is_node(n2_uid) - assert n2_uid not in micropsi.nodenets[fixed_nodenet].get_data()['nodes'] + assert not runtime.nodenets[test_nodenet].is_node(n1_uid) + assert n1_uid not in runtime.nodenets[test_nodenet].get_data()['nodes'] + assert not runtime.nodenets[test_nodenet].is_node(n2_uid) + assert n2_uid not in runtime.nodenets[test_nodenet].get_data()['nodes'] # assert that sub-nodespaces are gone as well - assert not micropsi.nodenets[fixed_nodenet].is_nodespace(sub_uid) - assert sub_uid not in micropsi.nodenets[fixed_nodenet].get_data()['nodespaces'] + assert not runtime.nodenets[test_nodenet].is_nodespace(sub_uid) + assert sub_uid not in runtime.nodenets[test_nodenet].get_data()['nodespaces'] -def test_clone_nodes_nolinks(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) - success, result = micropsi.clone_nodes(fixed_nodenet, ['n0001', 'n0002'], 'none', offset=[10, 20, 2]) +def test_clone_nodes_nolinks(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + nodenet = 
runtime.get_nodenet(test_nodenet) + success, result = runtime.clone_nodes(test_nodenet, [source.uid, register.uid], 'none', offset=[10, 20, 2]) assert success for n in result.values(): - if n['name'] == 'A1_copy': - a1_copy = n - elif n['name'] == 'A2_copy': - a2_copy = n - assert nodenet.is_node(a1_copy['uid']) - assert a1_copy['uid'] != 'n0001' - assert a1_copy['type'] == nodenet.get_node('n0001').type - assert a1_copy['parameters'] == nodenet.get_node('n0001').clone_parameters() - assert a1_copy['position'][0] == nodenet.get_node('n0001').position[0] + 10 - assert a1_copy['position'][1] == nodenet.get_node('n0001').position[1] + 20 - assert a1_copy['position'][2] == nodenet.get_node('n0001').position[2] + 2 - assert nodenet.is_node(a2_copy['uid']) - assert a2_copy['name'] == nodenet.get_node('n0002').name + '_copy' - assert a2_copy['uid'] != 'n0002' + if n['name'] == source.name: + source_copy = n + elif n['name'] == register.name: + register_copy = n + assert nodenet.is_node(source_copy['uid']) + assert source_copy['uid'] != source.uid + assert source_copy['type'] == nodenet.get_node(source.uid).type + assert source_copy['parameters'] == nodenet.get_node(source.uid).clone_parameters() + assert source_copy['position'][0] == nodenet.get_node(source.uid).position[0] + 10 + assert source_copy['position'][1] == nodenet.get_node(source.uid).position[1] + 20 + assert source_copy['position'][2] == nodenet.get_node(source.uid).position[2] + 2 + assert nodenet.is_node(register_copy['uid']) + assert register_copy['name'] == nodenet.get_node(register.uid).name + assert register_copy['uid'] != register.uid assert len(result.keys()) == 2 - assert a1_copy['links'] == {} - assert a2_copy['links'] == {} + assert source_copy['links'] == {} + assert register_copy['links'] == {} -def test_clone_nodes_all_links(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) - success, result = micropsi.clone_nodes(fixed_nodenet, ['n0001', 'n0002'], 'all') +def 
test_clone_nodes_all_links(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + nodenet = runtime.get_nodenet(test_nodenet) + thirdnode = netapi.create_node('Neuron', None, 'third') + netapi.link(thirdnode, 'gen', register, 'gen') + success, result = runtime.clone_nodes(test_nodenet, [source.uid, register.uid], 'all') assert success - # expect 3 instead of two results, because the sensor that links to A1 should be delivered - # as a followupdnode to A1_copy to render incoming links + # expect 3 instead of two results, because thirdnode should be delivered + # as a followupdnode to source_copy to render incoming links assert len(result.keys()) == 3 for n in result.values(): - if n['name'] == 'A1_copy': - a1_copy = n - elif n['name'] == 'A2_copy': - a2_copy = n - - # assert the link between a1-copy and a2-copy exists - a1link = a1_copy['links']['por'][0] - assert a1link['target_node_uid'] == a2_copy['uid'] - - # assert the link between sensor and the a1-copy exists - sensor = nodenet.get_node('n0005').get_data() - candidate = None - for link in sensor['links']['gen']: - if link['target_node_uid'] == a1_copy['uid']: - candidate = link - assert candidate['target_slot_name'] == 'gen' - - -def test_clone_nodes_internal_links(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) - success, result = micropsi.clone_nodes(fixed_nodenet, ['n0001', 'n0002'], 'internal') + if n['name'] == source.name: + source_copy = n + elif n['name'] == register.name: + register_copy = n + + # assert the links between the copied nodes exist: + assert len(source_copy['links']['gen']) == 2 + assert set([l['target_node_uid'] for l in source_copy['links']['gen']]) == {source_copy['uid'], register_copy['uid']} + + # assert the link between thirdnode and register-copy exists + third = nodenet.get_node(thirdnode.uid).get_data() + assert len(third['links']['gen']) == 2 + assert set([l['target_node_uid'] for l in third['links']['gen']]) == {register.uid, 
register_copy['uid']} + + +def test_clone_nodes_internal_links(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + thirdnode = netapi.create_node('Neuron', None, 'third') + netapi.link(thirdnode, 'gen', register, 'gen') + success, result = runtime.clone_nodes(test_nodenet, [source.uid, register.uid], 'internal') assert success assert len(result.keys()) == 2 for n in result.values(): - if n['name'] == 'A1_copy': - a1_copy = n - elif n['name'] == 'A2_copy': - a2_copy = n + if n['name'] == source.name: + source_copy = n + elif n['name'] == register.name: + register_copy = n - # assert the link between a1-copy and a2-copy exists - a1link = a1_copy['links']['por'][0] - assert a1link['target_node_uid'] == a2_copy['uid'] + # assert the links between the copied nodes exist: + assert len(source_copy['links']['gen']) == 2 + assert set([l['target_node_uid'] for l in source_copy['links']['gen']]) == {source_copy['uid'], register_copy['uid']} - # assert the link between sensor and the a1-copy does not exist - sensor = nodenet.get_node('n0005').get_data() - candidate = None - for link in sensor['links']['gen']: - if link['target_node_uid'] == a1_copy['uid']: - candidate = link - assert candidate is None + # assert the link between thirdnode and register-copy does not exist + third = net.get_node(thirdnode.uid).get_data() + assert len(third['links']['gen']) == 1 -def test_clone_nodes_to_new_nodespace(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def test_clone_nodes_to_new_nodespace(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) + thirdnode = netapi.create_node('Neuron', None, 'third') + netapi.link(thirdnode, 'gen', register, 'gen') + success, result = runtime.clone_nodes(test_nodenet, [source.uid, register.uid], 'internal') - res, testspace_uid = micropsi.add_nodespace(fixed_nodenet, [100, 100], nodespace=None, name="testspace") - - success, result = 
micropsi.clone_nodes(fixed_nodenet, ['n0001', 'n0002'], 'internal', nodespace=testspace_uid) + res, testspace_uid = runtime.add_nodespace(test_nodenet, nodespace=None, name="testspace") + success, result = runtime.clone_nodes(test_nodenet, [source.uid, register.uid], 'internal', nodespace=testspace_uid) assert success assert len(result.keys()) == 2 for n in result.values(): - if n['name'] == 'A1_copy': - a1_copy = n - elif n['name'] == 'A2_copy': - a2_copy = n - - a1_copy = nodenet.get_node(a1_copy['uid']) - a2_copy = nodenet.get_node(a2_copy['uid']) + if n['name'] == source.name: + source_copy = n + elif n['name'] == register.name: + register_copy = n - assert a1_copy.parent_nodespace == testspace_uid - assert a2_copy.parent_nodespace == testspace_uid + source_copy = net.get_node(source_copy['uid']) + register_copy = net.get_node(register_copy['uid']) - -def test_clone_nodes_copies_gate_params(fixed_nodenet): - nodenet = micropsi.get_nodenet(fixed_nodenet) - micropsi.set_gate_parameters(fixed_nodenet, 'n0001', 'gen', {'maximum': 0.1}) - success, result = micropsi.clone_nodes(fixed_nodenet, ['n0001'], 'internal') - assert success - copy = nodenet.get_node(list(result.keys())[0]) - assert round(copy.get_gate_parameters()['gen']['maximum'], 2) == 0.1 + assert source_copy.parent_nodespace == testspace_uid + assert register_copy.parent_nodespace == testspace_uid -def test_modulators(fixed_nodenet, engine): - nodenet = micropsi.get_nodenet(fixed_nodenet) +def test_modulators(runtime, test_nodenet, engine): + nodenet = runtime.get_nodenet(test_nodenet) # assert modulators are instantiated from the beginning assert nodenet._modulators != {} assert nodenet.get_modulator('emo_activation') is not None @@ -249,30 +250,29 @@ def test_modulators(fixed_nodenet, engine): assert round(nodenet.netapi.get_modulator("test_modulator"), 4) == -0.58 # no modulators should be set if we disable the emotional_parameter module - res, uid = micropsi.new_nodenet('foobar', engine, 
use_modulators=False) - new_nodenet = micropsi.get_nodenet(uid) + res, uid = runtime.new_nodenet('foobar', engine, use_modulators=False) + new_nodenet = runtime.get_nodenet(uid) assert new_nodenet._modulators == {} # and no Emo-stepoperator should be set. for item in new_nodenet.stepoperators: assert 'Emotional' not in item.__class__.__name__ -def test_modulators_sensor_actor_connection(test_nodenet, test_world): - nodenet = micropsi.get_nodenet(test_nodenet) - micropsi.set_nodenet_properties(test_nodenet, worldadapter="Braitenberg", world_uid=test_world) - res, s1_id = micropsi.add_node(test_nodenet, "Sensor", [10, 10], None, name="brightness_l", parameters={'datasource': 'brightness_l'}) - res, s2_id = micropsi.add_node(test_nodenet, "Sensor", [20, 20], None, name="emo_activation", parameters={'datasource': 'emo_activation'}) - res, a1_id = micropsi.add_node(test_nodenet, "Actor", [30, 30], None, name="engine_l", parameters={'datatarget': 'engine_l'}) - res, a2_id = micropsi.add_node(test_nodenet, "Actor", [40, 40], None, name="base_importance_of_intention", parameters={'datatarget': 'base_importance_of_intention'}) - res, r1_id = micropsi.add_node(test_nodenet, "Register", [10, 30], None, name="r1") - res, r2_id = micropsi.add_node(test_nodenet, "Register", [10, 30], None, name="r2") +def test_modulators_sensor_actuator_connection(runtime, test_nodenet, default_world): + nodenet = runtime.get_nodenet(test_nodenet) + runtime.set_nodenet_properties(test_nodenet, worldadapter="Default", world_uid=default_world) + res, s1_id = runtime.add_node(test_nodenet, "Sensor", [10, 10], None, name="static_on", parameters={'datasource': 'static_on'}) + res, s2_id = runtime.add_node(test_nodenet, "Sensor", [20, 20], None, name="emo_activation", parameters={'datasource': 'emo_activation'}) + res, a1_id = runtime.add_node(test_nodenet, "Actuator", [30, 30], None, name="echo", parameters={'datatarget': 'echo'}) + res, a2_id = runtime.add_node(test_nodenet, "Actuator", [40, 40], 
None, name="base_importance_of_intention", parameters={'datatarget': 'base_importance_of_intention'}) + res, r1_id = runtime.add_node(test_nodenet, "Neuron", [10, 30], None, name="r1") + res, r2_id = runtime.add_node(test_nodenet, "Neuron", [10, 30], None, name="r2") s1 = nodenet.get_node(s1_id) s2 = nodenet.get_node(s2_id) r1 = nodenet.get_node(r1_id) r2 = nodenet.get_node(r2_id) - s2.set_gate_parameter('gen', 'maximum', 999) - micropsi.add_link(test_nodenet, r1_id, 'gen', a1_id, 'gen') - micropsi.add_link(test_nodenet, r2_id, 'gen', a2_id, 'gen') + runtime.add_link(test_nodenet, r1_id, 'gen', a1_id, 'gen') + runtime.add_link(test_nodenet, r2_id, 'gen', a2_id, 'gen') r1.activation = 0.3 r2.activation = 0.7 emo_val = nodenet.get_modulator("emo_activation") @@ -283,48 +283,110 @@ def nothing(): nodenet.worldadapter_instance.reset_datatargets = nothing nodenet.step() - assert round(nodenet.worldadapter_instance.datatargets['engine_l'], 3) == 0.3 - assert round(s1.activation, 3) == round(nodenet.worldadapter_instance.get_datasource_value('brightness_l'), 3) + assert round(nodenet.worldadapter_instance.datatargets['echo'], 3) == 0.3 + assert round(s1.activation, 3) == round(nodenet.worldadapter_instance.get_datasource_value('static_on'), 3) assert round(s2.activation, 3) == round(emo_val, 3) assert round(nodenet.get_modulator('base_importance_of_intention'), 3) == 0.7 - assert round(nodenet.worldadapter_instance.datatargets['engine_l'], 3) == 0.3 emo_val = nodenet.get_modulator("emo_activation") nodenet.step() assert round(s2.activation, 3) == round(emo_val, 3) -def test_node_parameters(fixed_nodenet, resourcepath): +def test_node_parameters(runtime, test_nodenet, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - 
fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "gatetypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "parameters": ["linktype", "threshold", "protocol_mode"],\ - "parameter_values": {\ - "linktype": ["catexp", "subsur"],\ - "protocol_mode": ["all_active", "most_active_one"]\ - },\ - "parameter_defaults": {\ - "linktype": "catexp",\ - "protocol_mode": "all_active"\ - }}\ - }') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - - assert micropsi.reload_native_modules() - res, uid = micropsi.add_node(fixed_nodenet, "Testnode", [10, 10], name="Test", parameters={"linktype": "catexp", "threshold": "", "protocol_mode": "all_active"}) - # nativemodule = micropsi.nodenets[fixed_nodenet].get_node(uid) - assert micropsi.save_nodenet(fixed_nodenet) - - -def test_delete_linked_nodes(fixed_nodenet): - - nodenet = micropsi.get_nodenet(fixed_nodenet) + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "parameters": ["linktype", "threshold", "protocol_mode"], + "parameter_values": { + "linktype": ["catexp", "subsur"], + "protocol_mode": ["all_active", "most_active_one"] + }, + "parameter_defaults": { + "linktype": "catexp", + "protocol_mode": "all_active" + } +} +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + + assert runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10], name="Test", parameters={"threshold": "", "protocol_mode": "most_active_one"}) + # nativemodule = runtime.nodenets[test_nodenet].get_node(uid) + assert runtime.save_nodenet(test_nodenet) + node = runtime.nodenets[test_nodenet].get_node(uid) + assert node.get_parameter('linktype') == 'catexp' + assert node.get_parameter('protocol_mode') == 'most_active_one' + + +@pytest.mark.engine("dict_engine") +def 
test_node_states(runtime, test_nodenet, node): + nodenet = runtime.get_nodenet(test_nodenet) + node = nodenet.get_node(node) + assert node.get_state('foobar') is None + node.set_state('foobar', 'bazbaz') + assert node.get_state('foobar') == 'bazbaz' + node.set_state('foobar', 42) + assert node.get_state('foobar') == 42 + + +@pytest.mark.engine("theano_engine") +def test_node_states_numpy(runtime, test_nodenet, node, resourcepath): + import os + import numpy as np + + nodenet = runtime.get_nodenet(test_nodenet) + node = nodenet.get_node(node) + assert node.get_state('foobar') is None + node.set_state('foobar', 'bazbaz') + assert node.get_state('foobar') == 'bazbaz' + node.set_state('foobar', 42) + assert node.get_state('foobar') == 42 + + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') + with open(nodetype_file, 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", +} +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + + assert runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10], name="Test") + + testnode = runtime.nodenets[test_nodenet].get_node(uid) + testnode.set_state("string", "hugo") + testnode.set_state("dict", {"eins": 1, "zwei": 2}) + testnode.set_state("list", [{"eins": 1, "zwei": 2}, "boing"]) + testnode.set_state("numpy", np.asarray([1, 2, 3, 4])) + + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + + testnode = runtime.nodenets[test_nodenet].get_node(uid) + + assert testnode.get_state("string") == "hugo" + assert testnode.get_state("dict")["eins"] == 1 + assert testnode.get_state("list")[0]["eins"] == 1 + assert testnode.get_state("list")[1] == "boing" + assert testnode.get_state("numpy").sum() == 10 # only numpy arrays have ".sum()" + + testnode.set_state("wrong", (np.asarray([1, 2, 3]), 'tuple')) + + with 
pytest.raises(ValueError): + runtime.save_nodenet(test_nodenet) + + +def test_delete_linked_nodes(runtime, test_nodenet): + + nodenet = runtime.get_nodenet(test_nodenet) netapi = nodenet.netapi # create all evil (there will never be another dawn) @@ -349,116 +411,246 @@ def test_delete_linked_nodes(fixed_nodenet): netapi.delete_node(evil_two) -def test_multiple_nodenet_interference(engine, resourcepath): +def test_multiple_nodenet_interference(runtime, engine, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "gatetypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc"\ - }}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n node.get_gate('gen').gate_function(17)") - - micropsi.reload_native_modules() - - result, n1_uid = micropsi.new_nodenet('Net1', engine=engine, owner='Pytest User') - result, n2_uid = micropsi.new_nodenet('Net2', engine=engine, owner='Pytest User') - - n1 = micropsi.nodenets[n1_uid] - n2 = micropsi.nodenets[n2_uid] + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc" +} +def testnodefunc(netapi, node=None, **prams):\r\n node.get_gate('gen').gate_function(17) +""") + runtime.reload_code() + + result, n1_uid = runtime.new_nodenet('Net1', engine=engine, owner='Pytest User') + result, n2_uid = runtime.new_nodenet('Net2', engine=engine, owner='Pytest User') + + n1 = runtime.nodenets[n1_uid] + n2 = runtime.nodenets[n2_uid] nativemodule = n1.netapi.create_node("Testnode", None, "Testnode") - register1 = n1.netapi.create_node("Register", None, 
"Register1") + register1 = n1.netapi.create_node("Neuron", None, "Neuron1") n1.netapi.link(nativemodule, 'gen', register1, 'gen', weight=1.2) - source2 = n2.netapi.create_node("Register", None, "Source2") - register2 = n2.netapi.create_node("Register", None, "Register2") + source2 = n2.netapi.create_node("Neuron", None, "Source2") + register2 = n2.netapi.create_node("Neuron", None, "Neuron2") n2.netapi.link(source2, 'gen', source2, 'gen') n2.netapi.link(source2, 'gen', register2, 'gen', weight=0.9) source2.activation = 0.7 - micropsi.step_nodenet(n2.uid) + runtime.step_nodenet(n2.uid) assert n1.current_step == 0 assert register1.activation == 0 - assert register1.name == "Register1" + assert register1.name == "Neuron1" assert nativemodule.name == "Testnode" assert round(register1.get_slot('gen').get_links()[0].weight, 2) == 1.2 assert register1.get_slot('gen').get_links()[0].source_node.name == 'Testnode' - assert n1.get_node(register1.uid).name == "Register1" + assert n1.get_node(register1.uid).name == "Neuron1" assert n2.current_step == 1 assert round(source2.activation, 2) == 0.7 assert round(register2.activation, 2) == 0.63 - assert register2.name == "Register2" + assert register2.name == "Neuron2" assert source2.name == "Source2" assert round(register2.get_slot('gen').get_links()[0].weight, 2) == 0.9 assert register2.get_slot('gen').get_links()[0].source_node.name == 'Source2' - assert n2.get_node(register2.uid).name == "Register2" + assert n2.get_node(register2.uid).name == "Neuron2" -def test_get_nodespace_changes(fixed_nodenet): - net = micropsi.nodenets[fixed_nodenet] +def test_get_nodespace_changes(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, test_nodenet) net.step() - result = micropsi.get_nodespace_changes(fixed_nodenet, [None], 0) + result = runtime.get_nodespace_changes(test_nodenet, [None], 0) assert set(result['nodes_dirty'].keys()) == set(net.get_node_uids()) assert result['nodes_deleted'] == [] assert 
result['nodespaces_dirty'] == {} assert result['nodespaces_deleted'] == [] - nodes = {} - for n in net.netapi.get_nodes(): - nodes[n.name] = n - net.netapi.unlink(nodes['A1'], 'por', nodes['A2'], 'gen') - net.netapi.delete_node(nodes['B2']) - newnode = net.netapi.create_node('Pipe', None, "asdf") - net.netapi.link(newnode, 'gen', nodes['B1'], 'gen') + net.netapi.unlink(source, 'gen', register, 'gen') + net.netapi.delete_node(register) + newnode = net.netapi.create_node('Neuron', None, "new thing") + net.netapi.link(newnode, 'gen', source, 'gen') newspace = net.netapi.create_nodespace(None, "nodespace") net.step() - test = micropsi.get_nodenet_activation_data(fixed_nodenet, [None], 1) + test = runtime.get_nodenet_activation_data(test_nodenet, [None], 1) assert test['has_changes'] - result = micropsi.get_nodespace_changes(fixed_nodenet, [None], 1) - assert nodes['B2'].uid in result['nodes_deleted'] - assert nodes['A1'].uid in result['nodes_dirty'] - assert nodes['A2'].uid in result['nodes_dirty'] - assert result['nodes_dirty'][nodes['A1'].uid]['links'] == {} + result = runtime.get_nodespace_changes(test_nodenet, [None], 1) + assert register.uid in result['nodes_deleted'] + assert source.uid in result['nodes_dirty'] assert newnode.uid in result['nodes_dirty'] + assert len(result['nodes_dirty'][source.uid]['links']) == 1 assert len(result['nodes_dirty'][newnode.uid]['links']['gen']) == 1 assert newspace.uid in result['nodespaces_dirty'] - assert len(result['nodes_dirty'].keys()) == 4 + assert len(result['nodes_dirty'].keys()) == 2 assert len(result['nodespaces_dirty'].keys()) == 1 net.step() - test = micropsi.get_nodenet_activation_data(fixed_nodenet, [None], 2) + test = runtime.get_nodenet_activation_data(test_nodenet, [None], 2) assert not test['has_changes'] -def test_get_nodespace_changes_cycles(fixed_nodenet): - net = micropsi.nodenets[fixed_nodenet] +def test_get_nodespace_changes_cycles(runtime, test_nodenet): + net, netapi, source, register = prepare(runtime, 
test_nodenet) net.step() - nodes = {} - for n in net.netapi.get_nodes(): - nodes[n.name] = n - net.netapi.delete_node(nodes['B2']) + net.netapi.delete_node(register) net.step() - result = micropsi.get_nodespace_changes(fixed_nodenet, [None], 1) - assert nodes['B2'].uid in result['nodes_deleted'] + result = runtime.get_nodespace_changes(test_nodenet, [None], 1) + assert register.uid in result['nodes_deleted'] for i in range(101): net.step() - result = micropsi.get_nodespace_changes(fixed_nodenet, [None], 1) - assert nodes['B2'].uid not in result['nodes_deleted'] + result = runtime.get_nodespace_changes(test_nodenet, [None], 1) + assert register.uid not in result['nodes_deleted'] -def test_nodespace_properties(test_nodenet): +def test_nodespace_properties(runtime, test_nodenet): data = {'testvalue': 'foobar'} - rootns = micropsi.get_nodenet(test_nodenet).get_nodespace(None) - micropsi.set_nodespace_properties(test_nodenet, rootns.uid, data) - assert micropsi.nodenets[test_nodenet].metadata['nodespace_ui_properties'][rootns.uid] == data - assert micropsi.get_nodespace_properties(test_nodenet, rootns.uid) == data - micropsi.save_nodenet(test_nodenet) - micropsi.revert_nodenet(test_nodenet) - assert micropsi.get_nodespace_properties(test_nodenet, rootns.uid) == data - properties = micropsi.get_nodespace_properties(test_nodenet) + rootns = runtime.get_nodenet(test_nodenet).get_nodespace(None) + runtime.set_nodespace_properties(test_nodenet, rootns.uid, data) + assert runtime.nodenets[test_nodenet].metadata['nodespace_ui_properties'][rootns.uid] == data + assert runtime.get_nodespace_properties(test_nodenet, rootns.uid) == data + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + assert runtime.get_nodespace_properties(test_nodenet, rootns.uid) == data + properties = runtime.get_nodespace_properties(test_nodenet) assert properties[rootns.uid] == data + + +def test_native_module_reload_changes_gates(runtime, test_nodenet, resourcepath): + import os 
+ nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') + with open(nodetype_file, 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc" +} +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + + assert runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10], name="Test") + res, neuron_uid = runtime.add_node(test_nodenet, 'Neuron', [10, 10]) + runtime.add_link(test_nodenet, neuron_uid, 'gen', uid, 'gen') + runtime.add_link(test_nodenet, uid, 'gen', neuron_uid, 'gen') + with open(nodetype_file, 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["foo", "bar"], + "gatetypes": ["foo", "bar"], + "nodefunction_name": "testnodefunc" +} +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + assert runtime.reload_code() + nativemodule = runtime.nodenets[test_nodenet].get_node(uid) + assert nativemodule.get_gate_types() == ["foo", "bar"] + neuron = runtime.nodenets[test_nodenet].get_node(neuron_uid) + assert neuron.get_gate('gen').get_links() == [] + assert neuron.get_slot('gen').get_links() == [] + + +@pytest.mark.engine("dict_engine") +def test_runtime_autosave_dict(runtime, test_nodenet, resourcepath): + import os + import json + import zipfile + import tempfile + from time import sleep + runtime.set_runner_condition(test_nodenet, steps=100) + runtime.start_nodenetrunner(test_nodenet) + count = 0 + while runtime.nodenets[test_nodenet].is_active: + sleep(.1) + count += 1 + assert count < 20 # quit if not done after 2 sec + filename = os.path.join(resourcepath, "nodenets", "__autosave__", "%s_%d.zip" % (test_nodenet, 100)) + assert os.path.isfile(filename) + with zipfile.ZipFile(filename, 'r') as archive: + assert set(archive.namelist()) == {"nodenet.json"} + tmp = tempfile.TemporaryDirectory() + archive.extractall(tmp.name) 
+ with open(os.path.join(tmp.name, "nodenet.json"), 'r') as fp: + restored = json.load(fp) + original = runtime.nodenets[test_nodenet].export_json() + # step and runner_conditions might differ + for key in ['nodes', 'links', 'modulators', 'uid', 'name', 'owner', 'world', 'worldadapter', 'version', 'monitors', 'nodespaces']: + assert restored[key] == original[key] + + +@pytest.mark.engine("theano_engine") +def test_runtime_autosave_theano(runtime, test_nodenet, resourcepath): + import os + import tempfile + import zipfile + import numpy as np + from time import sleep + with open(os.path.join(resourcepath, "nodetypes", "Source.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "python", + "name": "Source", + "init_function_name": "source_init", + "run_function_name": "source", + "inputs": [], + "outputs": ["X"] +} + +def source_init(netapi, node, parameters): + import numpy as np + w_array = np.random.rand(8).astype(netapi.floatX) + node.set_theta("weights", w_array) + +def source(netapi, node, parameters): + return node.get_theta("weights").get_value() +""") + with open(os.path.join(resourcepath, "nodetypes", "Target.py"), 'w') as fp: + fp.write("""nodetype_definition = { + "flow_module": True, + "implementation": "python", + "name": "Target", + "run_function_name": "target", + "inputs": ["X"], + "outputs": [], + "inputdims": [1] +} + +def target(X, netapi, node, parameters): + node.set_state("incoming", X) +""") + + runtime.reload_code() + nodenet = runtime.nodenets[test_nodenet] + netapi = nodenet.netapi + source = netapi.create_node("Source", None, "Source") + target = netapi.create_node("Target", None, "Target") + netapi.flow(source, "X", target, "X") + neuron = netapi.create_node("Neuron", None, "Neuron") + netapi.link(neuron, 'gen', target, 'sub') + neuron.activation = 1 + runtime.set_runner_condition(test_nodenet, steps=100) + runtime.start_nodenetrunner(test_nodenet) + count = 0 + while 
runtime.nodenets[test_nodenet].is_active: + sleep(.1) + count += 1 + assert count < 20 # quit if not done after 2 sec + filename = os.path.join(resourcepath, "nodenets", "__autosave__", "%s_%d.zip" % (test_nodenet, 100)) + assert os.path.isfile(filename) + with zipfile.ZipFile(filename, 'r') as archive: + assert set(archive.namelist()) == {"nodenet.json", "flowgraph.pickle", "partition-000.npz", "%s_numpystate.npz" % target.uid, "%s_thetas.npz" % source.uid} + from micropsi_core.nodenet.theano_engine.theano_nodenet import TheanoNodenet + tmp = tempfile.TemporaryDirectory() + archive.extractall(tmp.name) + net = TheanoNodenet(tmp.name, "restored", uid=test_nodenet, native_modules=runtime.native_modules) + net.load() + nsource = net.netapi.get_node(source.uid) + ntarget = net.netapi.get_node(target.uid) + nneuron = net.netapi.get_node(neuron.uid) + assert nsource.name == "Source" + assert nsource.outputmap == {'X': {(ntarget.uid, 'X')}} + assert np.all(nsource.get_theta("weights").get_value() == source.get_theta("weights").get_value()) + assert np.all(ntarget.get_state("incoming") == target.get_state("incoming")) + assert nneuron.get_gate('gen').get_links()[0].target_node == ntarget diff --git a/micropsi_core/tests/test_runtime_nodes.py b/micropsi_core/tests/test_runtime_nodes.py index 7a950a46..d69af069 100644 --- a/micropsi_core/tests/test_runtime_nodes.py +++ b/micropsi_core/tests/test_runtime_nodes.py @@ -4,18 +4,17 @@ """ """ -from micropsi_core import runtime as micropsi import pytest __author__ = 'joscha' __date__ = '29.10.12' -def prepare_nodenet(test_nodenet): - res, node_a_uid = micropsi.add_node(test_nodenet, "Pipe", [200, 250, 10], None, state=None, name="A") - res, node_b_uid = micropsi.add_node(test_nodenet, "Pipe", [500, 350, 10], None, state=None, name="B") - res, node_c_uid = micropsi.add_node(test_nodenet, "Pipe", [300, 150, 10], None, state=None, name="C") - res, node_s_uid = micropsi.add_node(test_nodenet, "Sensor", [200, 450, 10], None, 
state=None, name="S") +def prepare_nodenet(runtime, test_nodenet): + res, node_a_uid = runtime.add_node(test_nodenet, "Pipe", [200, 250, 10], None, state=None, name="A") + res, node_b_uid = runtime.add_node(test_nodenet, "Pipe", [500, 350, 10], None, state=None, name="B") + res, node_c_uid = runtime.add_node(test_nodenet, "Pipe", [300, 150, 10], None, state=None, name="C") + res, node_s_uid = runtime.add_node(test_nodenet, "Sensor", [200, 450, 10], None, state=None, name="S") return { 'a': node_a_uid, 'b': node_b_uid, @@ -24,72 +23,67 @@ def prepare_nodenet(test_nodenet): } -def test_add_node(test_nodenet): - micropsi.load_nodenet(test_nodenet) +def test_add_node(runtime, test_nodenet): + runtime.load_nodenet(test_nodenet) # make sure nodenet is empty - nodespace = micropsi.get_nodes(test_nodenet) + nodespace = runtime.get_nodes(test_nodenet) try: for i in nodespace["nodes"]: - micropsi.delete_node(test_nodenet, i) + runtime.delete_node(test_nodenet, i) except: pass - nodespace = micropsi.get_nodes(test_nodenet) + nodespace = runtime.get_nodes(test_nodenet) assert len(nodespace.get("nodes", [])) == 0 - res, uid = micropsi.add_node(test_nodenet, "Pipe", [200, 250, 10], None, state=None, name="A") - nodespace = micropsi.get_nodes(test_nodenet) + res, uid = runtime.add_node(test_nodenet, "Pipe", [200, 250, 10], None, state=None, name="A") + nodespace = runtime.get_nodes(test_nodenet) assert len(nodespace["nodes"]) == 1 node1 = nodespace["nodes"][uid] assert node1["name"] == "A" assert node1["position"] == [200, 250, 10] -def test_position_always_3d(test_nodenet): - res, nuid = micropsi.add_node(test_nodenet, "Pipe", [200], None, state=None, name="A") - res, nsuid = micropsi.add_nodespace(test_nodenet, [200, 125, 0, 134], None, name="NS") - data = micropsi.get_nodes(test_nodenet) +def test_position_always_3d(runtime, test_nodenet): + res, nuid = runtime.add_node(test_nodenet, "Pipe", [200], None, state=None, name="A") + data = runtime.get_nodes(test_nodenet) assert 
data['nodes'][nuid]['position'] == [200, 0, 0] - assert data['nodespaces'][nsuid]['position'] == [200, 125, 0] -def test_get_nodenet_activation_data(test_nodenet): - nodes = prepare_nodenet(test_nodenet) +def test_get_nodenet_activation_data(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) uid = nodes['a'] - activation_data = micropsi.get_nodenet_activation_data(test_nodenet, [None]) - assert activation_data["activations"][uid][0] == 0 - assert activation_data["activations"][uid][1] == 0 - assert activation_data["activations"][uid][2] == 0 - assert activation_data["activations"][uid][3] == 0 - assert activation_data["activations"][uid][4] == 0 - assert activation_data["activations"][uid][5] == 0 - assert activation_data["activations"][uid][6] == 0 - - micropsi.set_node_activation(test_nodenet, nodes['a'], 0.34556865) - - activation_data = micropsi.get_nodenet_activation_data(test_nodenet, [None]) + activation_data = runtime.get_nodenet_activation_data(test_nodenet, [None]) + uid not in activation_data["activations"] + runtime.set_node_activation(test_nodenet, nodes['a'], 0.34556865) + activation_data = runtime.get_nodenet_activation_data(test_nodenet, [None]) assert activation_data["activations"][uid][0] == 0.3 -def test_get_nodenet_activation_data_for_nodespace(test_nodenet): - nodes = prepare_nodenet(test_nodenet) +def test_get_nodenet_activation_data_for_nodespace(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) + netapi = runtime.nodenets[test_nodenet].netapi uid = nodes['a'] - nodespace = micropsi.nodenets[test_nodenet].get_nodespace_uids()[0] - activation_data = micropsi.get_nodenet_activation_data(test_nodenet, [nodespace]) - assert activation_data["activations"][uid][0] == 0 - - -def test_get_nodespace(test_nodenet): - nodes = prepare_nodenet(test_nodenet) - nodespace = micropsi.get_nodes(test_nodenet) + nodespace = runtime.nodenets[test_nodenet].get_nodespace_uids()[0] + activation_data = 
runtime.get_nodenet_activation_data(test_nodenet, [nodespace]) + # zero activations are not sent anymore + assert uid not in activation_data["activations"] + netapi.get_node(uid).activation = 0.9 + activation_data = runtime.get_nodenet_activation_data(test_nodenet, [nodespace]) + assert activation_data["activations"][uid][0] == 0.9 + + +def test_get_nodespace(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) + nodespace = runtime.get_nodes(test_nodenet) assert len(nodespace["nodes"]) == 4 node1 = nodespace["nodes"][nodes['a']] assert node1["name"] == "A" assert node1["position"] == [200, 250, 10] -def test_get_nodespace_list(test_nodenet): - nodes = prepare_nodenet(test_nodenet) - data = micropsi.get_nodespace_list(test_nodenet) +def test_get_nodespace_list(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) + data = runtime.get_nodespace_list(test_nodenet) uid = list(data.keys())[0] assert data[uid]['name'] == 'Root' assert nodes['a'] in data[uid]['nodes'] @@ -98,19 +92,19 @@ def test_get_nodespace_list(test_nodenet): assert node['type'] == 'Pipe' -def test_get_nodespace_list_with_empty_nodespace(test_nodenet): - res, uid = micropsi.add_nodespace(test_nodenet, [200, 250, 10], None, name="Foospace") - data = micropsi.get_nodespace_list(test_nodenet) +def test_get_nodespace_list_with_empty_nodespace(runtime, test_nodenet): + res, uid = runtime.add_nodespace(test_nodenet, None, name="Foospace") + data = runtime.get_nodespace_list(test_nodenet) assert data[uid]['nodes'] == {} -def test_add_link(test_nodenet): - nodes = prepare_nodenet(test_nodenet) - micropsi.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 0.5, 1) - micropsi.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 1, 0.1) - micropsi.add_link(test_nodenet, nodes['c'], "ret", nodes['b'], "gen", 1, 1) +def test_add_link(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) + runtime.add_link(test_nodenet, nodes['a'], "por", 
nodes['b'], "gen", 0.5) + runtime.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 1) + runtime.add_link(test_nodenet, nodes['c'], "ret", nodes['b'], "gen", 1) - nodespace = micropsi.get_nodes(test_nodenet) + nodespace = runtime.get_nodes(test_nodenet) assert len(nodespace["nodes"]) == 4 link_a_b = nodespace["nodes"][nodes['a']]['links']['por'][0] @@ -126,303 +120,284 @@ def test_add_link(test_nodenet): assert nodespace['nodes'][nodes['s']]['links'] == {} -def test_delete_link(test_nodenet): - nodes = prepare_nodenet(test_nodenet) - success, link = micropsi.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 0.5, 1) +def test_delete_link(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) + success, link = runtime.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 0.5) assert success - micropsi.delete_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen") - nodespace = micropsi.get_nodes(test_nodenet) + runtime.delete_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen") + nodespace = runtime.get_nodes(test_nodenet) assert nodespace['nodes'][nodes['a']]['links'] == {} -def test_save_nodenet(test_nodenet): - prepare_nodenet(test_nodenet) +def test_save_nodenet(runtime, test_nodenet): + prepare_nodenet(runtime, test_nodenet) # save_nodenet - micropsi.save_nodenet(test_nodenet) + runtime.save_nodenet(test_nodenet) # unload_nodenet - micropsi.unload_nodenet(test_nodenet) + runtime.unload_nodenet(test_nodenet) try: - micropsi.get_nodes(test_nodenet) + runtime.get_nodes(test_nodenet) assert False, "could fetch a Nodespace that should not have been in memory" except: pass # load_nodenet - micropsi.get_nodenet(test_nodenet) - nodespace = micropsi.get_nodes(test_nodenet) + runtime.get_nodenet(test_nodenet) + nodespace = runtime.get_nodes(test_nodenet) assert len(nodespace["nodes"]) == 4 - micropsi.delete_nodenet(test_nodenet) + runtime.delete_nodenet(test_nodenet) -def test_reload_native_modules(fixed_nodenet): +def 
test_reload_code(runtime, test_nodenet, resourcepath): def hashlink(l): return "%s:%s:%s:%s" % (l['source_node_uid'], l['source_gate_name'], l['target_node_uid'], l['target_slot_name']) - data_before = micropsi.nodenets[fixed_nodenet].export_json() + import os + netapi = runtime.nodenets[test_nodenet].netapi + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'testnode.py') + with open(nodetype_file, 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"] + } +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + runtime.reload_code() + reg = netapi.create_node("Neuron", None, "reg") + test = netapi.create_node("Testnode", None, "test") + netapi.link(reg, 'gen', test, 'gen') + netapi.link(test, 'bar', reg, 'gen') + data_before = runtime.nodenets[test_nodenet].export_json() links_before = set([hashlink(l) for l in data_before.pop('links')]) - micropsi.reload_native_modules() - data_after = micropsi.nodenets[fixed_nodenet].export_json() + runtime.reload_code() + data_after = runtime.nodenets[test_nodenet].export_json() links_after = set([hashlink(l) for l in data_after.pop('links')]) assert data_before == data_after assert links_before == links_after -def test_native_module_and_recipe_categories(fixed_nodenet, resourcepath): +def test_native_module_and_recipe_categories(runtime, test_nodenet, resourcepath): import os - os.mkdir(os.path.join(resourcepath, 'Test', 'Test2')) - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') - recipe_file = os.path.join(resourcepath, 'Test', 'Test2', 'recipes.py') + os.makedirs(os.path.join(resourcepath, 'nodetypes', 'Test', 'Test2')) + os.makedirs(os.path.join(resourcepath, 'recipes', 'Test', 'Test2')) + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') + recipe_file = 
os.path.join(resourcepath, 'recipes', 'Test', 'Test2', 'recipes.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"]\ - }}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"] + } +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") with open(recipe_file, 'w') as fp: fp.write("def testrecipe(netapi):\r\n pass") - micropsi.reload_native_modules() - res = micropsi.get_available_native_module_types(fixed_nodenet) + runtime.reload_code() + res = runtime.get_available_native_module_types(test_nodenet) assert res['Testnode']['category'] == 'Test' - res = micropsi.get_available_recipes() + assert res['Testnode']['line_number'] == 7 + res = runtime.get_available_recipes() assert res['testrecipe']['category'] == 'Test/Test2' -@pytest.mark.engine("dict_engine") -# This behavior is not available in theano_engine: Default inheritance at runtime is not implemented for -# performance reasons, changed defaults will only affect newly created nodes. -# This test will have to be replaced when the generic solution proposed in TOL-90 has been -# implemented. 
-def test_gate_defaults_change_with_nodetype(fixed_nodenet, resourcepath,): - # gate_parameters are a property of the nodetype, and should change with - # the nodetype definition if not explicitly overwritten for a given node - import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') - with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"],\ - "symbol": "t",\ - "gate_defaults":{\ - "foo": {\ - "amplification": 13\ - }\ - }}}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - micropsi.reload_native_modules() - res, uid = micropsi.add_node(fixed_nodenet, "Testnode", [10, 10], name="Testnode") - with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"],\ - "symbol": "t",\ - "gate_defaults":{\ - "foo": {\ - "amplification": 5\ - }\ - }}}') - micropsi.reload_native_modules() - params = micropsi.nodenets[fixed_nodenet].get_node(uid).get_gate_parameters() - assert params["foo"]["amplification"] == 5 - - -def test_non_standard_gate_defaults(test_nodenet): - nodenet = micropsi.nodenets[test_nodenet] - res, uid = micropsi.add_node(test_nodenet, 'Register', [30, 30, 10], name='test') - node = nodenet.netapi.get_node(uid) - genparams = {'maximum': 0.5} - micropsi.set_gate_parameters(nodenet.uid, node.uid, 'gen', genparams) - assert node.clone_non_default_gate_parameters()['gen']['maximum'] == 0.5 - assert node.get_data()['gate_parameters'] == {'gen': {'maximum': 0.5}} - assert nodenet.get_data()['nodes'][uid]['gate_parameters'] == {'gen': {'maximum': 0.5}} - data = micropsi.get_nodes(test_nodenet) - assert 
data['nodes'][uid]['gate_parameters'] == {'gen': {'maximum': 0.5}} - - -def test_ignore_links(test_nodenet): - nodes = prepare_nodenet(test_nodenet) - micropsi.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 0.5, 1) +def test_ignore_links(runtime, test_nodenet): + nodes = prepare_nodenet(runtime, test_nodenet) + runtime.add_link(test_nodenet, nodes['a'], "por", nodes['b'], "gen", 0.5) - nodespace = micropsi.get_nodes(test_nodenet, []) + nodespace = runtime.get_nodes(test_nodenet, []) assert len(nodespace["nodes"]) == 4 assert 'links' not in nodespace assert len(nodespace["nodes"][nodes['a']]['links']['por']) == 1 - nodespace = micropsi.get_nodes(test_nodenet, [], include_links=False) + nodespace = runtime.get_nodes(test_nodenet, [], include_links=False) assert 'links' not in nodespace["nodes"][nodes['a']] -def test_remove_and_reload_native_module(fixed_nodenet, resourcepath): +def test_remove_and_reload_native_module(runtime, test_nodenet, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"],\ - "symbol": "t",\ - "gate_defaults":{\ - "foo": {\ - "amplification": 13\ - }\ - }}}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - - micropsi.reload_native_modules() - res, uid = micropsi.add_node(fixed_nodenet, "Testnode", [10, 10, 10], name="Testnode") + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"], + "symbol": "t" + } +def testnodefunc(netapi, node=None, **prams):\r\n 
return 17 +""") + + runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10, 10], name="Testnode") os.remove(nodetype_file) - os.remove(nodefunc_file) - micropsi.reload_native_modules() - assert 'Testnode' not in micropsi.get_available_native_module_types(fixed_nodenet) + runtime.reload_code() + assert 'Testnode' not in runtime.get_available_native_module_types(test_nodenet) @pytest.mark.engine("dict_engine") -def test_engine_specific_nodetype_dict(fixed_nodenet, resourcepath): +def test_engine_specific_nodetype_dict(runtime, test_nodenet, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "engine": "theano_engine",\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"],\ - "symbol": "t",\ - "gate_defaults":{\ - "foo": {\ - "amplification": 13\ - }\ - }}}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - - micropsi.reload_native_modules() - data = micropsi.get_nodenet_metadata(fixed_nodenet) + fp.write("""nodetype_definition = { + "engine": "theano_engine", + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"], + "symbol": "t" + } +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + + runtime.reload_code() + res, data = runtime.get_nodenet_metadata(test_nodenet) assert "Testnode" not in data['native_modules'] @pytest.mark.engine("theano_engine") -def test_engine_specific_nodetype_theano(fixed_nodenet, resourcepath): +def test_engine_specific_nodetype_theano(runtime, test_nodenet, resourcepath): import os - nodetype_file = 
os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "engine": "dict_engine",\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "gatetypes": ["gen", "foo", "bar"],\ - "symbol": "t",\ - "gate_defaults":{\ - "foo": {\ - "amplification": 13\ - }\ - }}}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - - micropsi.reload_native_modules() - data = micropsi.get_nodenet_metadata(fixed_nodenet) + fp.write("""nodetype_definition = { + "engine": "dict_engine", + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "gatetypes": ["gen", "foo", "bar"], + "symbol": "t" + } +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + + runtime.reload_code() + res, data = runtime.get_nodenet_metadata(test_nodenet) assert "Testnode" not in data['native_modules'] -def test_node_parameters_none_resets_to_default(fixed_nodenet): - nodenet = micropsi.nodenets[fixed_nodenet] - res, uid = micropsi.add_node(fixed_nodenet, 'Pipe', [30, 30, 10], name='test') +def test_node_parameters_none_resets_to_default(runtime, test_nodenet): + nodenet = runtime.nodenets[test_nodenet] + res, uid = runtime.add_node(test_nodenet, 'Pipe', [30, 30, 10], name='test') node = nodenet.netapi.get_node(uid) - micropsi.set_node_parameters(fixed_nodenet, node.uid, {'expectation': '', 'wait': 0}) + runtime.set_node_parameters(test_nodenet, node.uid, {'expectation': '', 'wait': 0}) assert node.get_parameter('expectation') == 1 assert node.get_parameter('wait') == 0 -def test_get_recipes(fixed_nodenet, resourcepath): +def test_get_recipes(runtime, test_nodenet, resourcepath): import os - recipe_file = os.path.join(resourcepath, 
'Test', 'recipes.py') + os.makedirs(os.path.join(resourcepath, 'recipes', 'Test')) + recipe_file = os.path.join(resourcepath, 'recipes', 'Test', 'recipes.py') with open(recipe_file, 'w') as fp: fp.write(""" def testfoo(netapi, count=23): return {'count':count} """) - micropsi.reload_native_modules() - recipes = micropsi.get_available_recipes() + runtime.reload_code() + recipes = runtime.get_available_recipes() assert 'testfoo' in recipes assert len(recipes['testfoo']['parameters']) == 1 assert recipes['testfoo']['parameters'][0]['name'] == 'count' assert recipes['testfoo']['parameters'][0]['default'] == 23 -def test_run_recipe(fixed_nodenet, resourcepath): +def test_run_recipe(runtime, test_nodenet, resourcepath): import os - recipe_file = os.path.join(resourcepath, 'Test', 'recipes.py') + os.makedirs(os.path.join(resourcepath, 'recipes', 'Test')) + recipe_file = os.path.join(resourcepath, 'recipes', 'Test', 'recipes.py') with open(recipe_file, 'w') as fp: fp.write(""" def testfoo(netapi, count=23): return {'count':count} """) - micropsi.reload_native_modules() - state, result = micropsi.run_recipe(fixed_nodenet, 'testfoo', {'count': 42}) + runtime.reload_code() + state, result = runtime.run_recipe(test_nodenet, 'testfoo', {'count': 42}) assert state assert result['count'] == 42 -def test_node_parameter_defaults(fixed_nodenet, resourcepath): +def test_node_parameter_defaults(runtime, test_nodenet, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "gatetypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "parameters": ["testparam"],\ - "parameter_defaults": {\ - "testparam": 13\ - }\ - }}') - with open(nodefunc_file, 'w') 
as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - - micropsi.reload_native_modules() - res, uid = micropsi.add_node(fixed_nodenet, "Testnode", [10, 10, 10], name="Test") - node = micropsi.nodenets[fixed_nodenet].get_node(uid) + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "parameters": ["testparam"], + "parameter_defaults": { + "testparam": 13 + } + } +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + + runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10, 10], name="Test") + node = runtime.nodenets[test_nodenet].get_node(uid) assert node.get_parameter("testparam") == 13 -def test_node_parameters_from_persistence(fixed_nodenet, resourcepath): +def test_node_parameters_from_persistence(runtime, test_nodenet, resourcepath): import os - nodetype_file = os.path.join(resourcepath, 'Test', 'nodetypes.json') - nodefunc_file = os.path.join(resourcepath, 'Test', 'nodefunctions.py') + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') with open(nodetype_file, 'w') as fp: - fp.write('{"Testnode": {\ - "name": "Testnode",\ - "slottypes": ["gen", "foo", "bar"],\ - "gatetypes": ["gen", "foo", "bar"],\ - "nodefunction_name": "testnodefunc",\ - "parameters": ["testparam"],\ - "parameter_defaults": {\ - "testparam": 13\ - }\ - }}') - with open(nodefunc_file, 'w') as fp: - fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17") - micropsi.reload_native_modules() - res, uid = micropsi.add_node(fixed_nodenet, "Testnode", [10, 10, 10], name="Test") - node = micropsi.nodenets[fixed_nodenet].get_node(uid) + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "parameters": ["testparam"], + "parameter_defaults": { + 
"testparam": 13 + } + } +def testnodefunc(netapi, node=None, **prams):\r\n return 17 +""") + runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10, 10], name="Test") + node = runtime.nodenets[test_nodenet].get_node(uid) node.set_parameter("testparam", 42) - micropsi.save_nodenet(fixed_nodenet) - micropsi.revert_nodenet(fixed_nodenet) - node = micropsi.nodenets[fixed_nodenet].get_node(uid) + runtime.save_nodenet(test_nodenet) + runtime.revert_nodenet(test_nodenet) + node = runtime.nodenets[test_nodenet].get_node(uid) assert node.get_parameter("testparam") == 42 + + +def test_change_node_parameters(runtime, test_nodenet, resourcepath): + import os + nodetype_file = os.path.join(resourcepath, 'nodetypes', 'Test', 'testnode.py') + + def write_nodetypedef(params=[]): + with open(nodetype_file, 'w') as fp: + fp.write("""nodetype_definition = { + "name": "Testnode", + "slottypes": ["gen", "foo", "bar"], + "gatetypes": ["gen", "foo", "bar"], + "nodefunction_name": "testnodefunc", + "parameters": %s} +def testnodefunc(netapi, node=None, **prams):\r\n return 17 + """ % str(params)) + + write_nodetypedef(params=["foo", "bar"]) + runtime.reload_code() + res, uid = runtime.add_node(test_nodenet, "Testnode", [10, 10, 10], name="Test") + node = runtime.nodenets[test_nodenet].get_node(uid) + keys = node.clone_parameters().keys() + assert "foo" in keys + assert "bar" in keys + node.set_parameter("foo", 42) + + write_nodetypedef(params=["spam", "eggs"]) + runtime.reload_code() + node = runtime.nodenets[test_nodenet].get_node(uid) + keys = node.clone_parameters().keys() + assert "foo" not in keys + assert "bar" not in keys + assert "spam" in keys + assert "eggs" in keys + assert node.get_parameter('spam') is None + assert node.get_parameter('eggs') is None diff --git a/micropsi_core/tests/test_runtime_world_basics.py b/micropsi_core/tests/test_runtime_world_basics.py index dc0b478c..a2accab1 100644 --- a/micropsi_core/tests/test_runtime_world_basics.py 
+++ b/micropsi_core/tests/test_runtime_world_basics.py @@ -6,118 +6,149 @@ """ import os import mock -from micropsi_core import runtime -from micropsi_core import runtime as micropsi __author__ = 'joscha' __date__ = '29.10.12' -def test_new_world(resourcepath, test_world): - success, world_uid = micropsi.new_world("Waterworld", "World", owner="tester") +def test_new_world(runtime, resourcepath, default_world): + success, world_uid = runtime.new_world("Waterworld", "DefaultWorld", owner="tester") assert success - assert world_uid != test_world - world_properties = micropsi.get_world_properties(world_uid) + assert world_uid != default_world + world_properties = runtime.get_world_properties(world_uid) assert world_properties["name"] == "Waterworld" w_path = os.path.join(resourcepath, runtime.WORLD_DIRECTORY, world_uid + ".json") assert os.path.exists(w_path) # get_available_worlds - worlds = micropsi.get_available_worlds() - myworlds = micropsi.get_available_worlds("tester") - assert test_world in worlds + worlds = runtime.get_available_worlds() + myworlds = runtime.get_available_worlds("tester") + assert default_world in worlds assert world_uid in worlds assert world_uid in myworlds - assert test_world not in myworlds + assert default_world not in myworlds + + world = runtime.worlds[world_uid] + assert world.name == "Waterworld" + assert world.owner == "tester" + assert world.__class__.get_config_options() == [] + assert world.get_available_worldadapters()['Default'].__name__ == "Default" + assert world.config == {} # delete_world - micropsi.delete_world(world_uid) - assert world_uid not in micropsi.get_available_worlds() + runtime.delete_world(world_uid) + assert world_uid not in runtime.get_available_worlds() assert not os.path.exists(w_path) -def test_get_world_properties(test_world): - wp = micropsi.get_world_properties(test_world) - assert "Island" == wp["world_type"] - assert test_world == wp["uid"] - - -def test_get_worldadapters(test_world, test_nodenet): - 
wa = micropsi.get_worldadapters(test_world) - assert 'Braitenberg' in wa - assert 'description' in wa['Braitenberg'] - assert 'datasources' not in wa['Braitenberg'] - runtime.set_nodenet_properties(test_nodenet, worldadapter='Braitenberg', world_uid=test_world) - wa = micropsi.get_worldadapters(test_world, test_nodenet) - assert wa['Braitenberg']['datatargets'] == ['engine_l', 'engine_r'] - assert wa['Braitenberg']['datasources'] == ['brightness_l', 'brightness_r'] - - -def test_add_worldobject(test_world): - world = runtime.get_available_worlds()[test_world] - runtime.add_worldobject(test_world, "Default", (10, 10), uid='foobar', name='foobar', parameters={}) - assert "foobar" in world.data['objects'] - assert "foobar" in world.objects - runtime.save_world(test_world) - runtime.revert_world(test_world) - assert "foobar" in world.data['objects'] - assert "foobar" in world.objects - - -def test_add_worldobject_without_id(test_world): - world = runtime.get_available_worlds()[test_world] +def test_get_world_properties(runtime, default_world): + wp = runtime.get_world_properties(default_world) + assert wp["world_type"] == "DefaultWorld" + assert default_world == wp["uid"] + + +def test_start_stop_simulation(runtime, default_world, default_nodenet): + nodenet = runtime.get_nodenet(default_nodenet) + world = runtime.load_world(default_world) + runtime.set_nodenet_properties(default_nodenet, worldadapter="Default", world_uid=default_world) + runtime.start_nodenetrunner(default_nodenet) + assert world.is_active + assert nodenet.is_active + runtime.stop_nodenetrunner(default_nodenet) + assert not world.is_active + assert not nodenet.is_active + + +def test_get_worldadapters(runtime, default_world, default_nodenet): + wa = runtime.get_worldadapters(default_world) + assert 'Default' in wa + assert 'description' in wa['Default'] + assert 'datasources' not in wa['Default'] + runtime.set_nodenet_properties(default_nodenet, worldadapter='Default', world_uid=default_world) + wa = 
runtime.get_worldadapters(default_world, default_nodenet) + assert wa['Default']['datatargets'] == ['echo'] + assert set(wa['Default']['datasources']) == set(['static_on', 'random', 'static_off']) + + +def test_add_worldobject(runtime, default_world): + world = runtime.load_world(default_world) + result, foobar_uid = runtime.add_worldobject(default_world, "TestObject", (10, 10), name='foobar', parameters={}) + assert foobar_uid in world.data['objects'] + assert foobar_uid in world.objects + result, spam_uid = runtime.add_worldobject(default_world, "Spam", (10, 10)) + assert not result # spam is not supported + runtime.save_world(default_world) + runtime.revert_world(default_world) + assert foobar_uid in world.data['objects'] + assert foobar_uid in world.objects + + +def test_add_worldobject_without_id(runtime, default_world): + world = runtime.load_world(default_world) count = len(world.objects) - runtime.add_worldobject(test_world, "Default", (10, 10), name='bazbaz', parameters={}) + runtime.add_worldobject(default_world, "TestObject", (10, 10), name='bazbaz', parameters={}) assert count + 1 == len(world.objects) assert count + 1 == len(world.data['objects']) -def test_get_worldobjects(test_world): - runtime.add_worldobject(test_world, "Default", (10, 10), uid='foobar', name='foobar', parameters={}) - objects = runtime.get_world_objects(test_world) - assert 'foobar' in objects - runtime.save_world(test_world) - runtime.revert_world(test_world) - objects = runtime.get_world_objects(test_world) - assert 'foobar' in objects +def test_get_worldobjects(runtime, default_world): + runtime.load_world(default_world) + reuslt, foobar_uid = runtime.add_worldobject(default_world, "TestObject", (10, 10), name='foobar', parameters={}) + objects = runtime.get_world_objects(default_world) + assert foobar_uid in objects + objects = runtime.get_world_objects(default_world, type="Spam") + assert not objects + objects = runtime.get_world_objects(default_world, type="TestObject") + 
assert foobar_uid in objects -def test_register_agent(test_world, test_nodenet): - world = runtime.worlds[test_world] - nodenet = runtime.get_nodenet(test_nodenet) +def test_register_agent(runtime, default_world, default_nodenet): + world = runtime.load_world(default_world) + nodenet = runtime.get_nodenet(default_nodenet) assert nodenet.uid not in world.data['agents'] - nodenet.world = test_world - runtime.set_nodenet_properties(nodenet.uid, worldadapter='Braitenberg', world_uid=world.uid) + nodenet.world = default_world + runtime.set_nodenet_properties(nodenet.uid, worldadapter='Default', world_uid=world.uid) assert nodenet.uid in world.data['agents'] assert nodenet.uid in world.agents - runtime.save_world(test_world) - runtime.revert_world(test_world) + runtime.save_world(default_world) + runtime.revert_world(default_world) assert nodenet.uid in world.data['agents'] assert nodenet.uid in world.agents -def test_set_object_properties(test_world): - world = runtime.get_available_worlds()[test_world] - runtime.add_worldobject(test_world, "Default", (10, 10), uid='foobar', name='foobar', parameters={}) - runtime.set_worldobject_properties(test_world, "foobar", position=(5, 5)) - assert world.objects["foobar"].position == (5, 5) - assert world.data['objects']['foobar']['position'] == (5, 5) - assert runtime.get_world_view(test_world, -1)['objects']['foobar']['position'] == (5, 5) - - -def test_set_agent_properties(test_world, test_nodenet): - world = runtime.worlds[test_world] - runtime.set_nodenet_properties(test_nodenet, worldadapter='Braitenberg', world_uid=test_world) - runtime.set_worldagent_properties(test_world, test_nodenet, position=(5, 5)) - assert world.agents[test_nodenet].position == (5, 5) - assert world.data['agents'][test_nodenet]['position'] == (5, 5) - - -def test_agent_dying_unregisters_agent(test_world, test_nodenet): - world = runtime.worlds[test_world] - nodenet = runtime.get_nodenet(test_nodenet) - nodenet.world = test_world - 
runtime.set_nodenet_properties(nodenet.uid, worldadapter='Braitenberg', world_uid=world.uid) +def test_set_object_properties(runtime, default_world): + world = runtime.load_world(default_world) + result, foobar_uid = runtime.add_worldobject(default_world, "TestObject", (10, 10), name='foobar', parameters={"foo": "bar"}) + runtime.set_worldobject_properties(default_world, foobar_uid, name="foobaz", position=(5, 5), orientation=270, parameters={"foo": "baz"}) + assert world.objects[foobar_uid].position == (5, 5) + assert world.data['objects'][foobar_uid]['position'] == (5, 5) + assert world.objects[foobar_uid].parameters["foo"] == "baz" + assert world.data['objects'][foobar_uid]['parameters']["foo"] == "baz" + assert world.objects[foobar_uid].name == "foobaz" + assert world.data['objects'][foobar_uid]['name'] == "foobaz" + assert world.objects[foobar_uid].orientation == 270 + assert world.data['objects'][foobar_uid]['orientation'] == 270 + + assert runtime.get_world_view(default_world, -1)['objects'][foobar_uid]['position'] == (5, 5) + + +def test_set_agent_properties(runtime, default_world, default_nodenet): + world = runtime.load_world(default_world) + runtime.set_nodenet_properties(default_nodenet, worldadapter='Default', world_uid=default_world) + runtime.set_worldagent_properties(default_world, default_nodenet, position=(5, 5), orientation=180, parameters={'foo': 'bar'}) + assert world.agents[default_nodenet].position == (5, 5) + assert world.data['agents'][default_nodenet]['position'] == (5, 5) + assert world.agents[default_nodenet].orientation == 180 + assert world.data['agents'][default_nodenet]['orientation'] == 180 + assert world.agents[default_nodenet].parameters == {'foo': 'bar'} + assert world.data['agents'][default_nodenet]['parameters'] == {'foo': 'bar'} + + +def test_agent_dying_unregisters_agent(runtime, default_world, default_nodenet): + world = runtime.load_world(default_world) + nodenet = runtime.get_nodenet(default_nodenet) + nodenet.world = 
default_world + runtime.set_nodenet_properties(nodenet.uid, worldadapter='Default', world_uid=world.uid) assert nodenet.uid in world.agents mockdead = mock.Mock(return_value=False) world.agents[nodenet.uid].is_alive = mockdead @@ -125,7 +156,7 @@ def test_agent_dying_unregisters_agent(test_world, test_nodenet): assert nodenet.uid not in world.agents -def test_world_does_not_spawn_deleted_agents(test_world, resourcepath): +def test_world_does_not_spawn_deleted_agents(runtime, default_world, resourcepath): from micropsi_core.world.world import World filename = os.path.join(resourcepath, 'worlds', 'foobar.json') data = """{ @@ -134,47 +165,45 @@ def test_world_does_not_spawn_deleted_agents(test_world, resourcepath): "owner": "Pytest User", "uid": "foobar", "version":1, - "world_type": "Island", + "world_type": "DefaultWorld", "agents": { "dummy": { "name": "Dummy", "position": [17, 17], - "type": "Braitenberg", + "type": "Default", "uid": "dummy" } } }""" with open(filename, 'w') as fp: fp.write(data) - world = World(filename, world_type='Island', name='foobar', owner='Pytest User', uid='foobar') + world = World(filename, world_type='DefaultWorld', name='foobar', owner='Pytest User', uid='foobar') assert 'dummy' not in world.agents # assert 'dummy' not in world.data['agents'] -def test_reset_datatargets(test_world, test_nodenet): - world = runtime.worlds[test_world] - nodenet = runtime.get_nodenet(test_nodenet) - nodenet.world = test_world - runtime.set_nodenet_properties(nodenet.uid, worldadapter='Braitenberg', world_uid=world.uid) - world.agents[test_nodenet].datatargets['engine_r'] = 0.7 - world.agents[test_nodenet].datatargets['engine_l'] = 0.2 - world.agents[test_nodenet].reset_datatargets() - assert world.agents[test_nodenet].datatargets['engine_l'] == 0 - assert world.agents[test_nodenet].datatargets['engine_r'] == 0 +def test_reset_datatargets(runtime, default_world, default_nodenet): + world = runtime.load_world(default_world) + nodenet = 
runtime.get_nodenet(default_nodenet) + nodenet.world = default_world + runtime.set_nodenet_properties(nodenet.uid, worldadapter='Default', world_uid=world.uid) + world.agents[default_nodenet].datatargets['echo'] = 0.7 + world.agents[default_nodenet].reset_datatargets() + assert world.agents[default_nodenet].datatargets['echo'] == 0 -def test_worldadapter_update_calls_reset_datatargets(test_world, test_nodenet): - world = runtime.worlds[test_world] - nodenet = runtime.get_nodenet(test_nodenet) - nodenet.world = test_world - runtime.set_nodenet_properties(nodenet.uid, worldadapter='Braitenberg', world_uid=world.uid) - world.agents[test_nodenet].reset_datatargets = mock.MagicMock(name='reset') - runtime.step_nodenet(test_nodenet) - world.agents[test_nodenet].reset_datatargets.assert_called_once_with() +def test_worldadapter_update_calls_reset_datatargets(runtime, default_world, default_nodenet): + world = runtime.load_world(default_world) + nodenet = runtime.get_nodenet(default_nodenet) + nodenet.world = default_world + runtime.set_nodenet_properties(nodenet.uid, worldadapter='Default', world_uid=world.uid) + world.agents[default_nodenet].reset_datatargets = mock.MagicMock(name='reset') + runtime.step_nodenet(default_nodenet) + world.agents[default_nodenet].reset_datatargets.assert_called_once_with() -def test_worlds_are_configurable(): - res, uid = runtime.new_world('testworld', 'Island', config={'foo': 'bar', '42': '23'}) +def test_worlds_are_configurable(runtime): + res, uid = runtime.new_world('testworld', 'DefaultWorld', config={'foo': 'bar', '42': '23'}) assert uid in runtime.worlds assert runtime.worlds[uid].data['config']['foo'] == 'bar' runtime.revert_world(uid) @@ -182,38 +211,140 @@ def test_worlds_are_configurable(): assert runtime.worlds[uid].data['config']['42'] == '23' -""" -def test_get_world_view(micropsi, test_world): - assert 0 - -def test_start_worldrunner(micropsi, test_world): - assert 0 - -def test_get_worldrunner_timestep(micropsi): - assert 0 
- -def test_get_is_world_running(micropsi): - assert 0 - -def test_set_worldrunner_timestep(micropsi): - assert 0 - -def test_stop_worldrunner(micropsi): - assert 0 - -def test_step_world(micropsi): - assert 0 - -def test_revert_world(micropsi): - assert 0 - -def test_save_world(micropsi): - assert 0 - -def test_export_world(micropsi): - assert 0 - -def test_import_world(micropsi): - assert 0 - -""" \ No newline at end of file +def test_set_world_properties(runtime, default_nodenet): + res, world_uid = runtime.new_world('testworld', 'DefaultWorld', config={'foo': 'bar', '42': '23'}) + nodenet = runtime.get_nodenet(default_nodenet) + nodenet.world = world_uid + runtime.set_nodenet_properties(nodenet.uid, worldadapter='Default', world_uid=world_uid) + assert default_nodenet in runtime.worlds[world_uid].agents + assert runtime.nodenets[default_nodenet].worldadapter == "Default" + old_wa = nodenet.worldadapter_instance + runtime.set_world_properties(world_uid, world_name='renamedworld', config={'foo': 'dings', '42': '5'}) + assert runtime.worlds[world_uid].name == 'renamedworld' + assert runtime.worlds[world_uid].data['config']['foo'] == 'dings' + assert runtime.worlds[world_uid].data['config']['42'] == '5' + assert default_nodenet in runtime.worlds[world_uid].agents + assert nodenet.worldadapter_instance is not None and nodenet.worldadapter_instance is not old_wa + + +def test_get_world_uid_by_name(runtime, default_world): + assert runtime.get_world_uid_by_name("World of Pain") == default_world + assert runtime.get_world_uid_by_name("Netherworld") is None + + +def test_world_discovery(runtime, default_nodenet, resourcepath): + import os + with open(os.path.join(resourcepath, 'worlds.json'), 'w') as fp: + fp.write(""" + {"worlds": ["custom_world.py"], + "worldadapters": ["someadapter.py"]}""") + with open(os.path.join(resourcepath, 'custom_world.py'), 'w') as fp: + fp.write(""" + +from micropsi_core.world.world import World +from dependency import things + +class 
MyWorld(World): + supported_worldadapters = ['MyCustomWA'] + + def __init__(self, filename, **kwargs): + super().__init__(filename, **kwargs) + for key in things: + setattr(self, key, things[key]) + +""") + os.mkdir(os.path.join(resourcepath, 'dependency')) + with open(os.path.join(resourcepath, 'dependency', '__init__.py'), 'w') as fp: + fp.write(""" +things = {'foo': 'baz'} +""") + + with open(os.path.join(resourcepath, 'someadapter.py'), 'w') as fp: + fp.write(""" + +from micropsi_core.world.worldadapter import WorldAdapter + +class MyCustomWA(WorldAdapter): + def __init__(self, world, uid=None, config={}, **data): + super().__init__(world, uid=uid, config=config, **data) + self.datasources = {'foo': 1} + self.datatargets = {'bar': 0} + self.datatarget_feedback = {'bar': 0} + + def update_data_sources_and_targets(self): + self.datasources['foo'] = self.datatargets['bar'] * 2 + +""") + + runtime.reload_code() + assert "MyWorld" in runtime.get_available_world_types() + + result, world_uid = runtime.new_world("test world", "MyWorld") + + assert runtime.worlds[world_uid].foo == 'baz' + assert runtime.set_nodenet_properties(default_nodenet, world_uid=world_uid, worldadapter="MyCustomWA") + assert runtime.nodenets[default_nodenet].worldadapter_instance.__class__.__name__ == 'MyCustomWA' + + +def test_realtime_world_stepping(runtime, default_nodenet, resourcepath): + import os + import time + with open(os.path.join(resourcepath, 'worlds.json'), 'w') as fp: + fp.write(""" + {"worlds": ["custom_world.py"], + "worldadapters": ["custom_world.py"]}""") + with open(os.path.join(resourcepath, 'custom_world.py'), 'w') as fp: + fp.write(""" + +from micropsi_core.world.world import World +from micropsi_core.world.worldadapter import WorldAdapter + +class MyWorld(World): + is_realtime = True + supported_worldadapters = ['MyCustomWA'] + + def __init__(self, filename, **kwargs): + super().__init__(filename, **kwargs) + self.custom_state = None + + def simulation_started(self): + 
super().simulation_started() + self.custom_state = 'runner started' + + def simulation_stopped(self): + super().simulation_stopped() + self.custom_state = 'runner stopped' + +class MyCustomWA(WorldAdapter): + def __init__(self, world, uid=None, config={}, **data): + super().__init__(world, uid=uid, config=config, **data) + + def update_data_sources_and_targets(self): + pass +""") + runtime.reload_code() + result, world_uid = runtime.new_world("test world", "MyWorld") + assert runtime.set_nodenet_properties(default_nodenet, world_uid=world_uid, worldadapter="MyCustomWA") + runtime.single_step_nodenet_only(default_nodenet) + assert not runtime.worlds[world_uid].is_active + assert runtime.nodenets[default_nodenet].current_step == 1 + assert runtime.worlds[world_uid].current_step == 0 + assert runtime.worlds[world_uid].custom_state is None + runtime.step_nodenet(default_nodenet) + time.sleep(.2) + assert not runtime.nodenets[default_nodenet].is_active + assert runtime.nodenets[default_nodenet].current_step == 2 + assert runtime.worlds[world_uid].is_active + assert runtime.worlds[world_uid].current_step > 3 + assert runtime.worlds[world_uid].custom_state == 'runner started' + runtime.stop_nodenetrunner(default_nodenet) + assert not runtime.worlds[world_uid].is_active + assert runtime.worlds[world_uid].custom_state == 'runner stopped' + runtime.start_nodenetrunner(default_nodenet) + time.sleep(.2) + runtime.step_nodenet(default_nodenet) + laststep = runtime.nodenets[default_nodenet].current_step + time.sleep(.2) + assert laststep == runtime.nodenets[default_nodenet].current_step + assert not runtime.nodenets[default_nodenet].is_active + assert runtime.worlds[world_uid].is_active diff --git a/micropsi_core/tests/test_vizapi.py b/micropsi_core/tests/test_vizapi.py deleted file mode 100644 index 95aea62e..00000000 --- a/micropsi_core/tests/test_vizapi.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -Tests for vizapi -""" - -from micropsi_core import 
runtime as micropsi - - -def test_plot_activations(test_nodenet): - from random import random - nodenet = micropsi.get_nodenet(test_nodenet) - vizapi = nodenet.netapi.vizapi - activations = [random() for i in range(256)] - plot = vizapi.NodenetPlot(plotsize=(2, 2)) - plot.add_activation_plot(activations) - res = plot.to_base64(format="png") - assert len(res) > 1000 - assert res.endswith('\n') - - -def test_plot_linkweights(test_nodenet): - from random import random - nodenet = micropsi.get_nodenet(test_nodenet) - vizapi = nodenet.netapi.vizapi - linkweights = [] - for i in range(16): - linkweights.append([random() for i in range(16)]) - plot = vizapi.NodenetPlot(plotsize=(2, 2)) - plot.add_linkweights_plot(linkweights) - res = plot.to_base64(format="png") - assert len(res) > 1000 - assert res.endswith('\n') - - -def test_save_file(test_nodenet, resourcepath): - from random import random - import os - nodenet = micropsi.get_nodenet(test_nodenet) - vizapi = nodenet.netapi.vizapi - activations = [random() for i in range(256)] - plot = vizapi.NodenetPlot(plotsize=(2, 2)) - plot.add_activation_plot(activations) - filepath = os.path.join(resourcepath, "plot.png") - returnpath = plot.save_to_file(filepath) - assert os.path.abspath(returnpath) == os.path.abspath(filepath) - assert os.path.isfile(filepath) - - -def test_plot_from_nodefunc(test_nodenet, resourcepath): - import os - from random import random - from time import sleep - nodenet = micropsi.get_nodenet(test_nodenet) - vizapi = nodenet.netapi.vizapi - activations = [random() for i in range(256)] - plot = vizapi.NodenetPlot(plotsize=(2, 2)) - plot.add_activation_plot(activations) - filepath = os.path.join(resourcepath, "plot.png") - returnpath = plot.save_to_file(filepath) - assert os.path.abspath(returnpath) == os.path.abspath(filepath) - assert os.path.isfile(filepath) - os.remove(filepath) - os.mkdir(os.path.join(resourcepath, 'plotter')) - nodetype_file = os.path.join(resourcepath, "plotter", "nodetypes.json") 
- nodefunc_file = os.path.join(resourcepath, "plotter", "nodefunctions.py") - with open(nodetype_file, 'w') as fp: - fp.write("""{"Plotter": { - "name": "Plotter", - "slottypes": [], - "nodefunction_name": "plotfunc", - "gatetypes": [], - "parameters": ["plotpath"]}}""") - with open(nodefunc_file, 'w') as fp: - fp.write(""" -def plotfunc(netapi, node=None, **params): - import os - from random import random - filepath = os.path.join(params['plotpath'], 'plot.png') - activations = [random() for i in range(256)] - plot = netapi.vizapi.NodenetPlot(plotsize=(2, 2)) - plot.add_activation_plot(activations) - plot.save_to_file(filepath) -""") - micropsi.reload_native_modules() - node = nodenet.netapi.create_node("Plotter", None, name="Plotter") - node.set_parameter("plotpath", resourcepath) - micropsi.start_nodenetrunner(test_nodenet) - sleep(2) - micropsi.stop_nodenetrunner(test_nodenet) - assert micropsi.MicropsiRunner.last_nodenet_exception == {} - assert os.path.isfile(os.path.join(resourcepath, "plot.png")) diff --git a/micropsi_core/tests/test_worldadapters.py b/micropsi_core/tests/test_worldadapters.py new file mode 100644 index 00000000..b515923b --- /dev/null +++ b/micropsi_core/tests/test_worldadapters.py @@ -0,0 +1,211 @@ +#!/usr/local/bin/python +# -*- coding: utf-8 -*- + +""" + +""" +import os +import mock +import pytest +from micropsi_core import runtime + +from micropsi_core.world import worldadapter as wa + +numpy_available = False +try: + import numpy as np + numpy_available = True +except ImportError: + pass + + +def test_default_worldadater(default_world): + + adapter = wa.Default(runtime.worlds[default_world]) + + sources = adapter.get_available_datasources() + assert set(["static_on", "static_off", "random"]) == set(sources) + targets = adapter.get_available_datatargets() + assert set(["echo"]) == set(targets) + + assert adapter.get_datasource_value('static_on') == 1 + assert len(adapter.get_datasource_values()) == 3 + + 
adapter.add_to_datatarget("echo", 0.3) + adapter.add_to_datatarget("echo", 0.25) + assert adapter.datatargets["echo"] == 0.55 + + adapter.set_datatarget_values([0.7]) + assert adapter.datatargets["echo"] == 0.7 + + adapter.update() + + assert adapter.get_datatarget_feedback_value("echo") == 0.7 + assert adapter.get_datatarget_feedback_values() == [0.7] + + adapter.set_datatarget_feedback_value("echo", 0.1) + assert adapter.get_datatarget_feedback_values() == [0.1] + + +@pytest.mark.skipif(not numpy_available, reason="requires numpy") +def test_arrayworldadapter(default_world): + + class TestArrayWA(wa.ArrayWorldAdapter): + def update_data_sources_and_targets(self): + self.datasource_values = np.copy(self.datatarget_values) * 2 + self.datatarget_feedback_values = np.copy(self.datatarget_values) + + adapter = TestArrayWA(runtime.worlds[default_world]) + + # datasources -------- + + # add + adapter.add_datasource("foo") + adapter.add_datasource("bar", initial_value=0.7) + assert adapter.datasource_names == ["foo", "bar"] + + # get + assert adapter.get_available_datasources() == adapter.datasource_names + assert np.allclose(adapter.get_datasource_value("bar"), 0.7) + assert np.allclose(adapter.get_datasource_values(), np.asarray([0., .7])) + + # index + assert adapter.get_datasource_index("bar") == 1 + + # set + adapter.set_datasource_value("foo", 123.) + assert np.allclose(adapter.get_datasource_value("foo"), 123.) 
+ assert np.allclose(adapter.get_datasource_values(), np.asarray([123., 0.7])) + adapter.set_datasource_values(np.asarray([.1, .2])) + assert np.allclose(adapter.get_datasource_values(), np.asarray([.1, .2])) + with pytest.raises(AssertionError): + assert adapter.set_datasource_values(np.asarray([.1, .2, .3, .4, .5])) + + # datatargets -------- + + # add + adapter.add_datatarget("t_foo") + adapter.add_datatarget("t_bar", initial_value=.6) + assert adapter.datatarget_names == ["t_foo", "t_bar"] + + # get + assert adapter.get_available_datatargets() == adapter.datatarget_names + assert np.allclose(adapter.get_datatarget_value("t_bar"), 0.6) + assert np.allclose(adapter.get_datatarget_values(), np.asarray([0., 0.6])) + + # index + assert adapter.get_datatarget_index("t_bar") == 1 + + # set + adapter.set_datatarget_value("t_foo", .1) + adapter.add_to_datatarget("t_foo", 2.1) + assert np.allclose(adapter.get_datatarget_value("t_foo"), 2.2) + assert np.allclose(adapter.get_datatarget_values(), np.asarray([2.2, 0.6])) + adapter.set_datatarget_values(np.asarray([.1, .2])) + assert np.allclose(adapter.get_datatarget_values(), np.asarray([.1, .2])) + with pytest.raises(AssertionError): + assert adapter.set_datatarget_values(np.asarray([.1, .2, .3, .4, .5])) + + # datatarget_feedback -------- + + # get + assert adapter.get_datatarget_feedback_value("t_foo") == 0. + assert np.allclose(adapter.get_datatarget_feedback_values(), np.asarray([0, 0.6])) + + # set + adapter.set_datatarget_feedback_value("t_bar", 123.) + assert adapter.get_datatarget_feedback_value("t_bar") == 123. 
+ assert np.allclose(adapter.get_datatarget_feedback_values(), np.asarray([0., 123.])) + adapter.set_datatarget_feedback_values(np.asarray([.1, .2])) + assert np.allclose(adapter.get_datatarget_feedback_values(), np.asarray([.1, .2])) + with pytest.raises(AssertionError): + assert adapter.set_datatarget_feedback_values(np.asarray([.1, .2, .3, .4, .5])) + + +@pytest.mark.skipif(not numpy_available, reason="requires numpy") +def test_flow_datasources(default_world): + + class TestArrayWA(wa.ArrayWorldAdapter): + def update_data_sources_and_targets(self): + self.datasource_values = np.random.rand(self.datasource_values.shape).astype(self.floatX) + self.datatarget_feedback_values = np.copy(self.datatarget_values).astype(self.floatX) + + adapter = TestArrayWA(runtime.worlds[default_world]) + + vision_shape = (2, 5) + vision_init = np.random.rand(*vision_shape).astype(adapter.floatX) + adapter.add_datasource("s_foo") + adapter.add_flow_datasource("s_vision", shape=vision_shape, initial_values=vision_init) + adapter.add_datasource("s_bar") + + assert adapter.get_available_datasources() == ['s_foo', 's_bar'] + assert adapter.get_available_flow_datasources() == ['s_vision'] + + motor_shape = (3, 2) + adapter.add_datatarget("t_execute") + adapter.add_flow_datatarget("t_motor", shape=motor_shape) + + assert adapter.get_available_datatargets() == ['t_execute'] + assert adapter.get_available_flow_datatargets() == ['t_motor'] + + vision = np.random.rand(*vision_shape).astype(adapter.floatX) + motor = np.random.rand(*motor_shape).astype(adapter.floatX) + + adapter.set_flow_datasource("s_vision", vision) + adapter.add_to_flow_datatarget("t_motor", motor) + adapter.add_to_flow_datatarget("t_motor", motor) + + assert np.allclose(adapter.get_flow_datasource("s_vision"), vision) + assert np.allclose(adapter.get_flow_datatarget("t_motor"), 2 * motor) + assert np.allclose(adapter.get_flow_datatarget_feedback("t_motor"), np.zeros((3, 2))) + + +def test_worldadapter_mixin(default_world): 
+ + class TestMixin(wa.WorldAdapterMixin): + + @staticmethod + def get_config_options(): + return [{"name": "some_setting", "default": 23}] + + def __init__(self, world, **data): + super().__init__(world, **data) + self.add_datasource("some_setting") + + def update_datasources_and_targets(self): + super().update_datasources_and_targets() + self.set_datasource_value("some_setting", self.some_setting) + + class TestArrayWA(TestMixin, wa.ArrayWorldAdapter): + + @staticmethod + def get_config_options(): + params = TestMixin.get_config_options() + params.extend([{"name": "other_setting", "default": 42}]) + return params + + def __init__(self, world, **data): + super().__init__(world, **data) + self.add_datasource("blubb") + + def update_data_sources_and_targets(self): + super().update_datasources_and_targets() + self.set_datasource_value("blubb", 21) + + world = runtime.worlds[default_world] + adapter = TestArrayWA(world) + adapter.update_data_sources_and_targets() + assert adapter.get_datasource_value("blubb") == 21 + assert adapter.get_datasource_value("some_setting") == 23 + assert adapter.some_setting == 23 + assert adapter.other_setting == 42 + + +def test_worldadapter_update_config(default_world, default_nodenet): + runtime.set_nodenet_properties(default_nodenet, worldadapter="Default", world_uid=default_world) + runtime.save_nodenet(default_nodenet) + assert runtime.nodenets[default_nodenet].worldadapter_instance.foo == 'bar' + runtime.set_nodenet_properties(default_nodenet, worldadapter="Default", world_uid=default_world, worldadapter_config={'foo': 'changed'}) + assert runtime.nodenets[default_nodenet].worldadapter_instance.foo == 'changed' + assert runtime.nodenets[default_nodenet].worldadapter_instance.config['foo'] == 'changed' + assert runtime.worlds[default_world].agents[default_nodenet].foo == 'changed' diff --git a/micropsi_core/tools.py b/micropsi_core/tools.py index 981febf8..c7aea629 100644 --- a/micropsi_core/tools.py +++ b/micropsi_core/tools.py @@ 
-9,6 +9,51 @@ __date__ = '29.06.12' import uuid +import errno +import os +import sys +try: + import ipdb as pdb +except ImportError: + import pdb + + +def post_mortem(): + """ if desired, point a debugger to the origin of the last exception """ + from micropsi_core.runtime import runtime_config + if runtime_config['micropsi2'].get('on_exception') == 'debug': + exception_type, exception, tb = sys.exc_info() + print('\033[01m\033[31m%s: \033[32m%s\033[0m' % (exception_type.__name__, exception)) + pdb.post_mortem(tb) + + +def pid_exists(pid): + """Check whether pid exists in the current process table. + UNIX only. obtained from http://stackoverflow.com/a/6940314/5952582 + """ + if pid < 0: + return False + if pid == 0: + # According to "man 2 kill" PID 0 refers to every process + # in the process group of the calling process. + # On certain systems 0 is a valid PID but we have no way + # to know that in a portable fashion. + raise ValueError('invalid PID 0') + try: + os.kill(pid, 0) + except OSError as err: + if err.errno == errno.ESRCH: + # ESRCH == No such process + return False + elif err.errno == errno.EPERM: + # EPERM clearly means there's a process to deny access to + return True + else: + # According to "man 2 kill" possible error values are + # (EINVAL, EPERM, ESRCH) + raise + else: + return True def generate_uid(): diff --git a/micropsi_core/world/island/__init__.py b/micropsi_core/world/island/__init__.py deleted file mode 100644 index 0c729770..00000000 --- a/micropsi_core/world/island/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/local/bin/python -# -*- coding: utf-8 -*- - -""" - -""" - -__author__ = 'joscha' -__date__ = '03.08.12' - diff --git a/micropsi_core/world/island/island.py b/micropsi_core/world/island/island.py deleted file mode 100644 index 21878606..00000000 --- a/micropsi_core/world/island/island.py +++ /dev/null @@ -1,538 +0,0 @@ -import math -import os -import logging -from micropsi_core.world.world import World -from 
micropsi_core.world.worldadapter import WorldAdapter -from micropsi_core.world.worldobject import WorldObject -from micropsi_core.world.island import png - - -class Island(World): - """ A simple Doerner Island-World""" - - supported_worldadapters = ['Braitenberg', 'Survivor', 'StructuredObjects'] - - groundmap = { - 'image': "psi_1.png", - 'start_position': (700, 400), - 'scaling': (8, 8) - } - - assets = { - 'background': "island/psi_1.png", - 'template': 'island/island.tpl', - 'paperjs': "island/island.js", - 'x': 2048, - 'y': 2048, - 'icons': { - 'Lightsource': 'island/lamp.png', - 'Braitenberg': 'island/braitenberg.png', - 'Survivor': 'island/Micropsi.png', - 'PalmTree': 'island/palm-tree.png', - 'Maple': 'island/maple.png', - 'Braintree': 'island/braintree.png', - 'Wirselkraut': 'island/wirselkraut.png', - 'Thornbush': 'island/unknownbox.png', - 'Juniper': 'island/juniper-berries.png', - 'Champignon': 'island/boletus-edulis.png', - 'FlyAgaric': 'island/fly-agaris.png', - 'Stone': 'island/rock.png', - 'Boulder': 'island/boulder.png', - 'Menhir': 'island/menhir.png', - 'Waterhole': 'island/well.png' - } - } - - def __init__(self, filename, world_type="Island", name="", owner="", engine=None, uid=None, version=1, config={}): - World.__init__(self, filename, world_type=world_type, name=name, owner=owner, uid=uid, version=version) - self.load_groundmap() - # self.current_step = 0 - self.data['assets'] = self.assets - - def load_groundmap(self): - """ - Imports a groundmap for an island world from a png file. We expect a bitdepth of 8 (i.e. each pixel defines - a point with one of 256 possible values). 
- """ - filename = os.path.join(os.path.dirname(__file__), 'resources', 'groundmaps', self.groundmap["image"]) - with open(filename, 'rb') as file: - png_reader = png.Reader(file) - x, y, image_array, image_params = png_reader.read() - self.ground_data = list(image_array) - self.scale_x = self.groundmap["scaling"][0] - self.scale_y = self.groundmap["scaling"][1] - self.x_max = x - 1 - self.y_max = y - 1 - - def get_ground_at(self, x, y): - """ - returns the ground type (an integer) at the given position - """ - _x = int(min(self.x_max, max(0, round(x / self.scale_x)))) - _y = int(min(self.y_max, max(0, round(y / self.scale_y)))) - return self.ground_data[_y][_x] - - def get_brightness_at(self, position): - """calculate the brightness of the world at the given position; used by sensors of agents""" - brightness = 0 - for key in self.objects: - if hasattr(self.objects[key], "get_intensity"): - # adapted from micropsi1 - pos = self.objects[key].position - diff = (pos[0] - position[0], pos[1] - position[1]) - dist = _2d_vector_norm(diff) + 1 - lightness = self.objects[key].get_intensity() - brightness += (lightness /dist /dist) - return brightness - - def get_movement_result(self, start_position, effort_vector, diameter=0): - """determine how much an agent moves in the direction of the effort vector, starting in the start position. 
- Note that agents may be hindered by impassable terrain and other objects""" - - efficiency = ground_types[self.get_ground_at(*start_position)]['move_efficiency'] - if not efficiency: - return start_position - movement_vector = (effort_vector[0] * efficiency, effort_vector[1] * efficiency) - - # make sure we don't bump into stuff - target_position = None - while target_position is None and _2d_distance_squared((0, 0), movement_vector) > 0.01: - target_position = _2d_translate(start_position, movement_vector) - - for i in self.objects.values(): - if _2d_distance_squared(target_position, i.position) < (diameter + i.diameter) / 2: - movement_vector = (movement_vector[0] * 0.5, movement_vector[1] * 0.5) # should be collision point - target_position = None - break - - if target_position is not None and ground_types[self.get_ground_at(target_position[0], target_position[1])]['agent_allowed']: - return target_position - else: - return start_position - - -class Lightsource(WorldObject): - """A pretty inert and boring light source, with a square falloff""" - - @property - def diameter(self): - return self.data.get('diameter', 1.) - - @diameter.setter - def diameter(self, diameter): - self.data['diameter'] = diameter - - @property - def intensity(self): - return self.data.get('intensity', 10000.) 
- - @intensity.setter - def intensity(self, intensity): - self.data['intensity'] = intensity - - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - - def get_intensity(self, falloff_func=1.): - """returns the strength of the light, optionally depending on a given fall-off function""" - return self.intensity * self.diameter * self.diameter / falloff_func - - def action_eat(self): - return True, 0, 0, -0.7 - - def action_drink(self): - return False, 0, 0, 0 - - -class PalmTree(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "PalmTree" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Maple(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Maple" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Braintree(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Braintree" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Wirselkraut(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Wirselkraut" - - def action_eat(self): - return True, 0, 0, 0.5 - - def action_drink(self): - return False, 0, 0, 0 - - -class Thornbush(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Thornbush" - - def action_eat(self): - logging.getLogger("world").debug("... 
and the whirlwind is in the thorn tree...") - return True, 0, 0, -0.1 - - def action_drink(self): - return False, 0, 0, 0 - - -class Juniper(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Juniper" - - def action_eat(self): - return True, 0.1, 0.1, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Champignon(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Champignon" - - def action_eat(self): - return True, 0.3, 0, 0 - - def action_drink(self): - return True, 0, 0, 0 - - -class FlyAgaric(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "FlyAgaric" - - def action_eat(self): - return True, 0.1, 0, -0.9 - - def action_drink(self): - return False, 0, 0, 0 - - -class Stone(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Stone" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Boulder(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Boulder" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Menhir(WorldObject): - def __init__(self, world, uid=None, **data): - WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Menhir" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return False, 0, 0, 0 - - -class Waterhole(WorldObject): - def __init__(self, world, uid=None, **data): - 
WorldObject.__init__(self, world, category="objects", uid=uid, **data) - self.structured_object_type = "Waterhole" - - def action_eat(self): - return False, 0, 0, 0 - - def action_drink(self): - return True, 0, 1, 0 - - -class Survivor(WorldAdapter): - - - def __init__(self, world, uid=None, **data): - super(Survivor, self).__init__(world, uid, **data) - - self.datasources = dict((s, 0) for s in ['body-energy', 'body-water', 'body-integrity']) - self.datatargets = dict((t, 0) for t in ['action_eat', 'action_drink', 'loco_north', 'loco_south', 'loco_east', 'loco_west']) - - self.currentobject = None - - self.energy = 1.0 - self.water = 1.0 - self.integrity = 1.0 - self.is_dead = False - - self.action_cooloff = 5 - - self.datasources['body-energy'] = self.energy - self.datasources['body-water'] = self.water - self.datasources['body-integrity'] = self.integrity - - def initialize_worldobject(self, data): - if "position" not in data: - self.position = self.world.groundmap['start_position'] - - def update_data_sources_and_targets(self): - """called on every world calculation step to advance the life of the agent""" - - if self.is_dead: - return - - effortvector = ((50*self.datatargets['loco_east'])+(50 * -self.datatargets['loco_west']), - (50*self.datatargets['loco_north'])-(50* -self.datatargets['loco_south'])) - desired_position = (self.position[0] + effortvector[0], self.position[1] + effortvector[1]) - self.datatargets['loco_east'] = 0 - self.datatargets['loco_west'] = 0 - self.datatargets['loco_north'] = 0 - self.datatargets['loco_south'] = 0 - - if ground_types[self.world.get_ground_at(desired_position[0], desired_position[1])]['agent_allowed']: - self.position = desired_position - - #find nearest object to load into the scene - lowest_distance_to_worldobject = float("inf") - nearest_worldobject = None - for key, worldobject in self.world.objects.items(): - # TODO: use a proper 2D geometry library - distance = _2d_distance_squared(self.position, 
worldobject.position) - if distance < lowest_distance_to_worldobject: - lowest_distance_to_worldobject = distance - nearest_worldobject = worldobject - - if self.currentobject is not nearest_worldobject and hasattr(nearest_worldobject, "structured_object_type"): - self.currentobject = nearest_worldobject - logging.getLogger("agent.%s" % self.uid).debug("Survivor WA selected new scene: %s", - self.currentobject.structured_object_type) - self.manage_body_parameters(nearest_worldobject) - - def manage_body_parameters(self, nearest_worldobject): - """called by update() to update energy, water and integrity""" - - for datatarget in self.datatargets: - if datatarget.startswith("action_"): - self.datatarget_feedback[datatarget] = 0 - if self.datatargets[datatarget] >= 1 and self.action_cooloff <= 0: - self.datatargets[datatarget] = 0 - if hasattr(nearest_worldobject, datatarget): - cando, delta_energy, delta_water, delta_integrity = nearest_worldobject.action_eat() - else: - cando, delta_energy, delta_water, delta_integrity = False, 0, 0, 0 - if cando: - self.action_cooloff = 6 - self.energy += delta_energy - self.water += delta_water - self.integrity += delta_integrity - self.datatarget_feedback[datatarget] = 1 - logging.getLogger("agent.%s" % self.uid).debug("Agent "+self.name+" "+ datatarget + - "("+nearest_worldobject.data["type"]+") result: "+ - " energy "+str(delta_energy)+ - " water "+str(delta_water)+ - " integrity "+str(delta_integrity)) - else: - logging.getLogger("agent.%s" % self.uid).debug("Agent "+self.name+" "+ datatarget + - "("+nearest_worldobject.data["type"]+") result: "+ - "cannot do.") - - self.action_cooloff -= 1 - self.energy -= 0.005 - self.water -= 0.005 - - if self.energy > 1: self.energy = 1 - if self.water > 1: self.water = 1 - if self.integrity > 1: self.integrity = 1 - - if self.energy <= 0 or self.water <= 0 or self.integrity <= 0: - self.is_dead = True - logging.getLogger("agent.%s" % self.uid).debug("Agent "+self.name+" has died:"+ - " 
energy "+str(self.energy)+ - " water "+str(self.water)+ - " integrity "+str(self.integrity)) - - self.datasources["body-energy"] = self.energy - self.datasources["body-water"] = self.water - self.datasources["body-integrity"] = self.integrity - - def is_alive(self): - """called by the world to check whether the agent has died and should be removed""" - return not self.is_dead - - -class Braitenberg(WorldAdapter): - """A simple Braitenberg vehicle chassis, with two light sensitive sensors and two engines""" - - # positions of sensors, relative to origin of agent center - brightness_l_offset = (-25, -50) - brightness_r_offset = (+25, -50) - - # positions of engines, relative to origin of agent center - engine_l_offset = (-25, 0) - engine_r_offset = (+25, 0) - - # agent diameter - diameter = 50 # note: this is also used as the distance between the wheels - radius = 25 - - # maximum speed - speed_limit = 1. - - def __init__(self, world, uid=None, **data): - super(Braitenberg, self).__init__(world, uid, **data) - - self.datasources = {'brightness_l': 0, 'brightness_r': 0} - self.datatargets = {'engine_l': 0, 'engine_r': 0} - self.datatarget_feedback = {'engine_l': 0, 'engine_r': 0} - - def initialize_worldobject(self, data): - if "position" not in data: - self.position = self.world.groundmap['start_position'] - - def update_data_sources_and_targets(self): - """called on every world calculation step to advance the life of the agent""" - - # drive engines - l_wheel_speed = self.datatargets["engine_l"] - r_wheel_speed = self.datatargets["engine_r"] - - # constrain speed - if l_wheel_speed + r_wheel_speed > 2 * self.speed_limit: # too fast - f = 2 * self.speed_limit / (l_wheel_speed + r_wheel_speed) - r_wheel_speed *= f - l_wheel_speed *= f - - # (left - right) because inverted rotation circle ( doesn't change x because cosine, does change y because sine :) - rotation = math.degrees((self.radius * l_wheel_speed - self.radius * r_wheel_speed) / self.diameter) - 
self.orientation += rotation - avg_velocity = (self.radius * r_wheel_speed + self.radius * l_wheel_speed) / 2 - translation = _2d_rotate((0, avg_velocity), self.orientation + rotation) - - # you may decide how far you want to go, but it is up the world to decide how far you make it - self.position = self.world.get_movement_result(self.position, translation, self.diameter) - - # sense light sources - brightness_l_position = _2d_translate(_2d_rotate(self.brightness_l_offset, self.orientation), self.position) - brightness_r_position = _2d_translate(_2d_rotate(self.brightness_r_offset, self.orientation), self.position) - - brightness_l = self.world.get_brightness_at(brightness_l_position) - brightness_r = self.world.get_brightness_at(brightness_r_position) - - self.datasources['brightness_l'] = brightness_l - self.datasources['brightness_r'] = brightness_r - - -def _2d_rotate(position, angle_degrees): - """rotate a 2d vector around an angle (in degrees)""" - radians = math.radians(angle_degrees) - # take the negative of the angle because the orientation circle works clockwise in this world - cos = math.cos(-radians) - sin = math.sin(-radians) - x, y = position - return x * cos - y * sin, - (x * sin + y * cos) - - -def _2d_distance_squared(position1, position2): - """calculate the square of the distance bwtween two 2D coordinate tuples""" - return (position1[0] - position2[0]) ** 2 + (position1[1] - position2[1]) ** 2 - - -def _2d_translate(position1, position2): - """add two 2d vectors""" - return (position1[0] + position2[0], position1[1] + position2[1]) - - -def _2d_vector_norm(vector): - """Calculates the length /norm of a given vector.""" - return math.sqrt(sum(i**2 for i in vector)) - - -# the indices of ground types correspond to the color numbers in the groundmap png -ground_types = ( - { - 'type': 'grass', - 'move_efficiency': 1.0, - 'agent_allowed': True, - }, - { - 'type': 'sand', - 'move_efficiency': 1.0, - 'agent_allowed': True, - }, - { - 'type': 'swamp', 
- 'move_efficiency': 0.5, - 'agent_allowed': True, - }, - { - 'type': 'darkgrass', - 'move_efficiency': 1.0, - 'agent_allowed': True, - }, - { - 'type': 'shallowwater', - 'move_efficiency': 0.2, - 'agent_allowed': True, - }, - { - 'type': 'rock', - 'move_efficiency': 1.0, - 'agent_allowed': True, - }, - { - 'type': 'clay', - 'move_efficiency': 0.7, - 'agent_allowed': True, - }, - { - 'type': 'water', - 'move_efficiency': 0.0, - 'agent_allowed': False, - }, - { - 'type': 'cliff', - 'move_efficiency': 1.0, - 'agent_allowed': False, - } - -) diff --git a/micropsi_core/world/island/png.py b/micropsi_core/world/island/png.py deleted file mode 100644 index 4e596c39..00000000 --- a/micropsi_core/world/island/png.py +++ /dev/null @@ -1,3854 +0,0 @@ -#!/usr/bin/env python - -# png.py - PNG encoder/decoder in pure Python -# -# Copyright (C) 2006 Johann C. Rocholl -# Portions Copyright (C) 2009 David Jones -# And probably portions Copyright (C) 2006 Nicko van Someren -# -# Original concept by Johann C. Rocholl. -# -# LICENCE (MIT) -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation files -# (the "Software"), to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, merge, -# publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# -# Changelog (recent first): -# 2009-03-11 David: interlaced bit depth < 8 (writing). -# 2009-03-10 David: interlaced bit depth < 8 (reading). -# 2009-03-04 David: Flat and Boxed pixel formats. -# 2009-02-26 David: Palette support (writing). -# 2009-02-23 David: Bit-depths < 8; better PNM support. -# 2006-06-17 Nicko: Reworked into a class, faster interlacing. -# 2006-06-17 Johann: Very simple prototype PNG decoder. -# 2006-06-17 Nicko: Test suite with various image generators. -# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support. -# 2006-06-15 Johann: Scanline iterator interface for large input files. -# 2006-06-09 Johann: Very simple prototype PNG encoder. - -# Incorporated into Bangai-O Development Tools by drj on 2009-02-11 from -# http://trac.browsershots.org/browser/trunk/pypng/lib/png.py?rev=2885 - -# Incorporated into pypng by drj on 2009-03-12 from -# //depot/prj/bangaio/master/code/png.py#67 - - -""" -Pure Python PNG Reader/Writer - -This Python module implements support for PNG images (see PNG -specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads -and writes PNG files with all allowable bit depths (1/2/4/8/16/24/32/48/64 -bits per pixel) and colour combinations: greyscale (1/2/4/8/16 bit); RGB, -RGBA, LA (greyscale with alpha) with 8/16 bits per channel; colour mapped -images (1/2/4/8 bit). Adam7 interlacing is supported for reading and -writing. A number of optional chunks can be specified (when writing) -and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``. - -For help, type ``import png; help(png)`` in your python interpreter. - -A good place to start is the :class:`Reader` and :class:`Writer` classes. - -Requires Python 2.3. 
Limited support is available for Python 2.2, but -not everything works. Best with Python 2.4 and higher. Installation is -trivial, but see the ``README.txt`` file (with the source distribution) -for details. - -This file can also be used as a command-line utility to convert -`Netpbm `_ PNM files to PNG, and the reverse conversion from PNG to -PNM. The interface is similar to that of the ``pnmtopng`` program from -Netpbm. Type ``python png.py --help`` at the shell prompt -for usage and a list of options. - -A note on spelling and terminology ----------------------------------- - -Generally British English spelling is used in the documentation. So -that's "greyscale" and "colour". This not only matches the author's -native language, it's also used by the PNG specification. - -The major colour models supported by PNG (and hence by PyPNG) are: -greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes -referred to using the abbreviations: L, RGB, LA, RGBA. In this case -each letter abbreviates a single channel: *L* is for Luminance or Luma or -Lightness which is the channel used in greyscale images; *R*, *G*, *B* stand -for Red, Green, Blue, the components of a colour image; *A* stands for -Alpha, the opacity channel (used for transparency effects, but higher -values are more opaque, so it makes sense to call it opacity). - -A note on formats ------------------ - -When getting pixel data out of this module (reading) and presenting -data to this module (writing) there are a number of ways the data could -be represented as a Python value. Generally this module uses one of -three formats called "flat row flat pixel", "boxed row flat pixel", and -"boxed row boxed pixel". Basically the concern is whether each pixel -and each row comes in its own little tuple (box), or not. 
- -Consider an image that is 3 pixels wide by 2 pixels high, and each pixel -has RGB components: - -Boxed row flat pixel:: - - list([R,G,B, R,G,B, R,G,B], - [R,G,B, R,G,B, R,G,B]) - -Each row appears as its own list, but the pixels are flattened so that -three values for one pixel simply follow the three values for the previous -pixel. This is the most common format used, because it provides a good -compromise between space and convenience. PyPNG regards itself as -at liberty to replace any sequence type with any sufficiently compatible -other sequence type; in practice each row is an array (from the array -module), and the outer list is sometimes an iterator rather than an -explicit list (so that streaming is possible). - -Flat row flat pixel:: - - [R,G,B, R,G,B, R,G,B, - R,G,B, R,G,B, R,G,B] - -The entire image is one single giant sequence of colour values. -Generally an array will be used (to save space), not a list. - -Boxed row boxed pixel:: - - list([ (R,G,B), (R,G,B), (R,G,B) ], - [ (R,G,B), (R,G,B), (R,G,B) ]) - -Each row appears in its own list, but each pixel also appears in its own -tuple. A serious memory burn in Python. - -In all cases the top row comes first, and for each row the pixels are -ordered from left-to-right. Within a pixel the values appear in the -order, R-G-B-A (or L-A for greyscale--alpha). - -There is a fourth format, mentioned because it is used internally, -is close to what lies inside a PNG file itself, and has some support -from the public API. This format is called packed. When packed, -each row is a sequence of bytes (integers from 0 to 255), just as -it is before PNG scanline filtering is applied. When the bit depth -is 8 this is essentially the same as boxed row flat pixel; when the -bit depth is less than 8, several pixels are packed into each byte; -when the bit depth is 16 (the only value more than 8 that is supported -by the PNG image format) each pixel value is decomposed into 2 bytes -(and `packed` is a misnomer). 
This format is used by the -:meth:`Writer.write_packed` method. It isn't usually a convenient -format, but may be just right if the source data for the PNG image -comes from something that uses a similar format (for example, 1-bit -BMPs, or another PNG file). - -And now, my famous members --------------------------- -""" - -# http://www.python.org/doc/2.2.3/whatsnew/node5.html -from __future__ import generators - -__version__ = "0.0.15" - -from array import array -try: # See :pyver:old - import itertools -except: - pass -import math -# http://www.python.org/doc/2.4.4/lib/module-operator.html -import operator -import struct -import sys -import zlib -# http://www.python.org/doc/2.4.4/lib/module-warnings.html -import warnings -try: - import pyximport - pyximport.install() - import cpngfilters as pngfilters -except ImportError: - pass - - -__all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array'] - - -# The PNG signature. -# http://www.w3.org/TR/PNG/#5PNG-file-signature -_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10) - -_adam7 = ((0, 0, 8, 8), - (4, 0, 8, 8), - (0, 4, 4, 8), - (2, 0, 4, 4), - (0, 2, 2, 4), - (1, 0, 2, 2), - (0, 1, 1, 2)) - -def group(s, n): - # See - # http://www.python.org/doc/2.6/library/functions.html#zip - return zip(*[iter(s)]*n) - -def isarray(x): - """Same as ``isinstance(x, array)`` except on Python 2.2, where it - always returns ``False``. This helps PyPNG work on Python 2.2. - """ - - try: - return isinstance(x, array) - except: - return False - -try: # see :pyver:old - array.tostring -except: - def tostring(row): - l = len(row) - return struct.pack('%dB' % l, *row) -else: - def tostring(row): - """Convert row of bytes to string. Expects `row` to be an - ``array``. - """ - return row.tostring() - -# Conditionally convert to bytes. Works on Python 2 and Python 3. 
-try: - bytes('', 'ascii') - def strtobytes(x): return bytes(x, 'iso8859-1') - def bytestostr(x): return str(x, 'iso8859-1') -except: - strtobytes = str - bytestostr = str - -def interleave_planes(ipixels, apixels, ipsize, apsize): - """ - Interleave (colour) planes, e.g. RGB + A = RGBA. - - Return an array of pixels consisting of the `ipsize` elements of data - from each pixel in `ipixels` followed by the `apsize` elements of data - from each pixel in `apixels`. Conventionally `ipixels` and - `apixels` are byte arrays so the sizes are bytes, but it actually - works with any arrays of the same type. The returned array is the - same type as the input arrays which should be the same type as each other. - """ - - itotal = len(ipixels) - atotal = len(apixels) - newtotal = itotal + atotal - newpsize = ipsize + apsize - # Set up the output buffer - # See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356 - out = array(ipixels.typecode) - # It's annoying that there is no cheap way to set the array size :-( - out.extend(ipixels) - out.extend(apixels) - # Interleave in the pixel data - for i in range(ipsize): - out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize] - for i in range(apsize): - out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize] - return out - -def check_palette(palette): - """Check a palette argument (to the :class:`Writer` class) for validity. - Returns the palette as a list if okay; raises an exception otherwise. - """ - - # None is the default and is allowed. - if palette is None: - return None - - p = list(palette) - if not (0 < len(p) <= 256): - raise ValueError("a palette must have between 1 and 256 entries") - seen_triple = False - for i,t in enumerate(p): - if len(t) not in (3,4): - raise ValueError( - "palette entry %d: entries must be 3- or 4-tuples." 
% i) - if len(t) == 3: - seen_triple = True - if seen_triple and len(t) == 4: - raise ValueError( - "palette entry %d: all 4-tuples must precede all 3-tuples" % i) - for x in t: - if int(x) != x or not(0 <= x <= 255): - raise ValueError( - "palette entry %d: values must be integer: 0 <= x <= 255" % i) - return p - -class Error(Exception): - prefix = 'Error' - def __str__(self): - return self.prefix + ': ' + ' '.join(self.args) - -class FormatError(Error): - """Problem with input file format. In other words, PNG file does - not conform to the specification in some way and is invalid. - """ - - prefix = 'FormatError' - -class ChunkError(FormatError): - prefix = 'ChunkError' - - -class Writer: - """ - PNG encoder in pure Python. - """ - - def __init__(self, width=None, height=None, - size=None, - greyscale=False, - alpha=False, - bitdepth=8, - palette=None, - transparent=None, - background=None, - gamma=None, - compression=None, - interlace=False, - bytes_per_sample=None, # deprecated - planes=None, - colormap=None, - maxval=None, - chunk_limit=2**20): - """ - Create a PNG encoder object. - - Arguments: - - width, height - Image size in pixels, as two separate arguments. - size - Image size (w,h) in pixels, as single argument. - greyscale - Input data is greyscale, not RGB. - alpha - Input data has alpha channel (RGBA or LA). - bitdepth - Bit depth: from 1 to 16. - palette - Create a palette for a colour mapped image (colour type 3). - transparent - Specify a transparent colour (create a ``tRNS`` chunk). - background - Specify a default background colour (create a ``bKGD`` chunk). - gamma - Specify a gamma value (create a ``gAMA`` chunk). - compression - zlib compression level: 0 (none) to 9 (more compressed); default: -1 or None. - interlace - Create an interlaced image. - chunk_limit - Write multiple ``IDAT`` chunks to save memory. 
- - The image size (in pixels) can be specified either by using the - `width` and `height` arguments, or with the single `size` - argument. If `size` is used it should be a pair (*width*, - *height*). - - `greyscale` and `alpha` are booleans that specify whether - an image is greyscale (or colour), and whether it has an - alpha channel (or not). - - `bitdepth` specifies the bit depth of the source pixel values. - Each source pixel value must be an integer between 0 and - ``2**bitdepth-1``. For example, 8-bit images have values - between 0 and 255. PNG only stores images with bit depths of - 1,2,4,8, or 16. When `bitdepth` is not one of these values, - the next highest valid bit depth is selected, and an ``sBIT`` - (significant bits) chunk is generated that specifies the original - precision of the source image. In this case the supplied pixel - values will be rescaled to fit the range of the selected bit depth. - - The details of which bit depth / colour model combinations the - PNG file format supports directly, are somewhat arcane - (refer to the PNG specification for full details). Briefly: - "small" bit depths (1,2,4) are only allowed with greyscale and - colour mapped images; colour mapped images cannot have bit depth - 16. - - For colour mapped images (in other words, when the `palette` - argument is specified) the `bitdepth` argument must match one of - the valid PNG bit depths: 1, 2, 4, or 8. (It is valid to have a - PNG image with a palette and an ``sBIT`` chunk, but the meaning - is slightly different; it would be awkward to press the - `bitdepth` argument into service for this.) - - The `palette` option, when specified, causes a colour mapped image - to be created: the PNG colour type is set to 3; greyscale - must not be set; alpha must not be set; transparent must - not be set; the bit depth must be 1,2,4, or 8. 
When a colour - mapped image is created, the pixel values are palette indexes - and the `bitdepth` argument specifies the size of these indexes - (not the size of the colour values in the palette). - - The palette argument value should be a sequence of 3- or - 4-tuples. 3-tuples specify RGB palette entries; 4-tuples - specify RGBA palette entries. If both 4-tuples and 3-tuples - appear in the sequence then all the 4-tuples must come - before all the 3-tuples. A ``PLTE`` chunk is created; if there - are 4-tuples then a ``tRNS`` chunk is created as well. The - ``PLTE`` chunk will contain all the RGB triples in the same - sequence; the ``tRNS`` chunk will contain the alpha channel for - all the 4-tuples, in the same sequence. Palette entries - are always 8-bit. - - If specified, the `transparent` and `background` parameters must - be a tuple with three integer values for red, green, blue, or - a simple integer (or singleton tuple) for a greyscale image. - - If specified, the `gamma` parameter must be a positive number - (generally, a float). A ``gAMA`` chunk will be created. Note that - this will not change the values of the pixels as they appear in - the PNG file, they are assumed to have already been converted - appropriately for the gamma specified. - - The `compression` argument specifies the compression level to - be used by the ``zlib`` module. Values from 1 to 9 specify - compression, with 9 being "more compressed" (usually smaller - and slower, but it doesn't always work out that way). 0 means - no compression. -1 and ``None`` both mean that the default - level of compession will be picked by the ``zlib`` module - (which is generally acceptable). - - If `interlace` is true then an interlaced image is created - (using PNG's so far only interace method, *Adam7*). This does not - affect how the pixels should be presented to the encoder, rather - it changes how they are arranged into the PNG file. 
On slow - connexions interlaced images can be partially decoded by the - browser to give a rough view of the image that is successively - refined as more image data appears. - - .. note :: - - Enabling the `interlace` option requires the entire image - to be processed in working memory. - - `chunk_limit` is used to limit the amount of memory used whilst - compressing the image. In order to avoid using large amounts of - memory, multiple ``IDAT`` chunks may be created. - """ - - # At the moment the `planes` argument is ignored; - # its purpose is to act as a dummy so that - # ``Writer(x, y, **info)`` works, where `info` is a dictionary - # returned by Reader.read and friends. - # Ditto for `colormap`. - - # A couple of helper functions come first. Best skipped if you - # are reading through. - - def isinteger(x): - try: - return int(x) == x - except: - return False - - def check_color(c, which): - """Checks that a colour argument for transparent or - background options is the right form. Also "corrects" bare - integers to 1-tuples. - """ - - if c is None: - return c - if greyscale: - try: - l = len(c) - except TypeError: - c = (c,) - if len(c) != 1: - raise ValueError("%s for greyscale must be 1-tuple" % - which) - if not isinteger(c[0]): - raise ValueError( - "%s colour for greyscale must be integer" % - which) - else: - if not (len(c) == 3 and - isinteger(c[0]) and - isinteger(c[1]) and - isinteger(c[2])): - raise ValueError( - "%s colour must be a triple of integers" % - which) - return c - - if size: - if len(size) != 2: - raise ValueError( - "size argument should be a pair (width, height)") - if width is not None and width != size[0]: - raise ValueError( - "size[0] (%r) and width (%r) should match when both are used." - % (size[0], width)) - if height is not None and height != size[1]: - raise ValueError( - "size[1] (%r) and height (%r) should match when both are used." 
- % (size[1], height)) - width,height = size - del size - - if width <= 0 or height <= 0: - raise ValueError("width and height must be greater than zero") - if not isinteger(width) or not isinteger(height): - raise ValueError("width and height must be integers") - # http://www.w3.org/TR/PNG/#7Integers-and-byte-order - if width > 2**32-1 or height > 2**32-1: - raise ValueError("width and height cannot exceed 2**32-1") - - if alpha and transparent is not None: - raise ValueError( - "transparent colour not allowed with alpha channel") - - if bytes_per_sample is not None: - warnings.warn('please use bitdepth instead of bytes_per_sample', - DeprecationWarning) - if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2): - raise ValueError( - "bytes per sample must be .125, .25, .5, 1, or 2") - bitdepth = int(8*bytes_per_sample) - del bytes_per_sample - if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth: - raise ValueError("bitdepth (%r) must be a postive integer <= 16" % - bitdepth) - - self.rescale = None - if palette: - if bitdepth not in (1,2,4,8): - raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8") - if transparent is not None: - raise ValueError("transparent and palette not compatible") - if alpha: - raise ValueError("alpha and palette not compatible") - if greyscale: - raise ValueError("greyscale and palette not compatible") - else: - # No palette, check for sBIT chunk generation. 
- if alpha or not greyscale: - if bitdepth not in (8,16): - targetbitdepth = (8,16)[bitdepth > 8] - self.rescale = (bitdepth, targetbitdepth) - bitdepth = targetbitdepth - del targetbitdepth - else: - assert greyscale - assert not alpha - if bitdepth not in (1,2,4,8,16): - if bitdepth > 8: - targetbitdepth = 16 - elif bitdepth == 3: - targetbitdepth = 4 - else: - assert bitdepth in (5,6,7) - targetbitdepth = 8 - self.rescale = (bitdepth, targetbitdepth) - bitdepth = targetbitdepth - del targetbitdepth - - if bitdepth < 8 and (alpha or not greyscale and not palette): - raise ValueError( - "bitdepth < 8 only permitted with greyscale or palette") - if bitdepth > 8 and palette: - raise ValueError( - "bit depth must be 8 or less for images with palette") - - transparent = check_color(transparent, 'transparent') - background = check_color(background, 'background') - - # It's important that the true boolean values (greyscale, alpha, - # colormap, interlace) are converted to bool because Iverson's - # convention is relied upon later on. - self.width = width - self.height = height - self.transparent = transparent - self.background = background - self.gamma = gamma - self.greyscale = bool(greyscale) - self.alpha = bool(alpha) - self.colormap = bool(palette) - self.bitdepth = int(bitdepth) - self.compression = compression - self.chunk_limit = chunk_limit - self.interlace = bool(interlace) - self.palette = check_palette(palette) - - self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap - assert self.color_type in (0,2,3,4,6) - - self.color_planes = (3,1)[self.greyscale or self.colormap] - self.planes = self.color_planes + self.alpha - # :todo: fix for bitdepth < 8 - self.psize = (self.bitdepth/8) * self.planes - - def make_palette(self): - """Create the byte sequences for a ``PLTE`` and if necessary a - ``tRNS`` chunk. Returned as a pair (*p*, *t*). *t* will be - ``None`` if no ``tRNS`` chunk is necessary. 
- """ - - p = array('B') - t = array('B') - - for x in self.palette: - p.extend(x[0:3]) - if len(x) > 3: - t.append(x[3]) - p = tostring(p) - t = tostring(t) - if t: - return p,t - return p,None - - def write(self, outfile, rows): - """Write a PNG image to the output file. `rows` should be - an iterable that yields each row in boxed row flat pixel format. - The rows should be the rows of the original image, so there - should be ``self.height`` rows of ``self.width * self.planes`` values. - If `interlace` is specified (when creating the instance), then - an interlaced PNG file will be written. Supply the rows in the - normal image order; the interlacing is carried out internally. - - .. note :: - - Interlacing will require the entire image to be in working memory. - """ - - if self.interlace: - fmt = 'BH'[self.bitdepth > 8] - a = array(fmt, itertools.chain(*rows)) - return self.write_array(outfile, a) - else: - nrows = self.write_passes(outfile, rows) - if nrows != self.height: - raise ValueError( - "rows supplied (%d) does not match height (%d)" % - (nrows, self.height)) - - def write_passes(self, outfile, rows, packed=False): - """ - Write a PNG image to the output file. - - Most users are expected to find the :meth:`write` or - :meth:`write_array` method more convenient. - - The rows should be given to this method in the order that - they appear in the output file. For straightlaced images, - this is the usual top to bottom ordering, but for interlaced - images the rows should have already been interlaced before - passing them to this function. - - `rows` should be an iterable that yields each row. When - `packed` is ``False`` the rows should be in boxed row flat pixel - format; when `packed` is ``True`` each row should be a packed - sequence of bytes. 
- - """ - - # http://www.w3.org/TR/PNG/#5PNG-file-signature - outfile.write(_signature) - - # http://www.w3.org/TR/PNG/#11IHDR - write_chunk(outfile, 'IHDR', - struct.pack("!2I5B", self.width, self.height, - self.bitdepth, self.color_type, - 0, 0, self.interlace)) - - # See :chunk:order - # http://www.w3.org/TR/PNG/#11gAMA - if self.gamma is not None: - write_chunk(outfile, 'gAMA', - struct.pack("!L", int(round(self.gamma*1e5)))) - - # See :chunk:order - # http://www.w3.org/TR/PNG/#11sBIT - if self.rescale: - write_chunk(outfile, 'sBIT', - struct.pack('%dB' % self.planes, - *[self.rescale[0]]*self.planes)) - - # :chunk:order: Without a palette (PLTE chunk), ordering is - # relatively relaxed. With one, gAMA chunk must precede PLTE - # chunk which must precede tRNS and bKGD. - # See http://www.w3.org/TR/PNG/#5ChunkOrdering - if self.palette: - p,t = self.make_palette() - write_chunk(outfile, 'PLTE', p) - if t: - # tRNS chunk is optional. Only needed if palette entries - # have alpha. - write_chunk(outfile, 'tRNS', t) - - # http://www.w3.org/TR/PNG/#11tRNS - if self.transparent is not None: - if self.greyscale: - write_chunk(outfile, 'tRNS', - struct.pack("!1H", *self.transparent)) - else: - write_chunk(outfile, 'tRNS', - struct.pack("!3H", *self.transparent)) - - # http://www.w3.org/TR/PNG/#11bKGD - if self.background is not None: - if self.greyscale: - write_chunk(outfile, 'bKGD', - struct.pack("!1H", *self.background)) - else: - write_chunk(outfile, 'bKGD', - struct.pack("!3H", *self.background)) - - # http://www.w3.org/TR/PNG/#11IDAT - if self.compression is not None: - compressor = zlib.compressobj(self.compression) - else: - compressor = zlib.compressobj() - - # Choose an extend function based on the bitdepth. The extend - # function packs/decomposes the pixel values into bytes and - # stuffs them onto the data array. 
- data = array('B') - if self.bitdepth == 8 or packed: - extend = data.extend - elif self.bitdepth == 16: - # Decompose into bytes - def extend(sl): - fmt = '!%dH' % len(sl) - data.extend(array('B', struct.pack(fmt, *sl))) - else: - # Pack into bytes - assert self.bitdepth < 8 - # samples per byte - spb = int(8/self.bitdepth) - def extend(sl): - a = array('B', sl) - # Adding padding bytes so we can group into a whole - # number of spb-tuples. - l = float(len(a)) - extra = math.ceil(l / float(spb))*spb - l - a.extend([0]*int(extra)) - # Pack into bytes - l = group(a, spb) - l = map(lambda e: reduce(lambda x,y: - (x << self.bitdepth) + y, e), l) - data.extend(l) - if self.rescale: - oldextend = extend - factor = \ - float(2**self.rescale[1]-1) / float(2**self.rescale[0]-1) - def extend(sl): - oldextend(map(lambda x: int(round(factor*x)), sl)) - - # Build the first row, testing mostly to see if we need to - # changed the extend function to cope with NumPy integer types - # (they cause our ordinary definition of extend to fail, so we - # wrap it). See - # http://code.google.com/p/pypng/issues/detail?id=44 - enumrows = enumerate(rows) - del rows - - # First row's filter type. - data.append(0) - # :todo: Certain exceptions in the call to ``.next()`` or the - # following try would indicate no row data supplied. - # Should catch. - i,row = enumrows.next() - try: - # If this fails... - extend(row) - except: - # ... try a version that converts the values to int first. - # Not only does this work for the (slightly broken) NumPy - # types, there are probably lots of other, unknown, "nearly" - # int types it works for. - def wrapmapint(f): - return lambda sl: f(map(int, sl)) - extend = wrapmapint(extend) - del wrapmapint - extend(row) - - for i,row in enumrows: - # Add "None" filter type. 
Currently, it's essential that - # this filter type be used for every scanline as we do not - # mark the first row of a reduced pass image; that means we - # could accidentally compute the wrong filtered scanline if - # we used "up", "average", or "paeth" on such a line. - data.append(0) - extend(row) - if len(data) > self.chunk_limit: - compressed = compressor.compress(tostring(data)) - if len(compressed): - # print >> sys.stderr, len(data), len(compressed) - write_chunk(outfile, 'IDAT', compressed) - # Because of our very witty definition of ``extend``, - # above, we must re-use the same ``data`` object. Hence - # we use ``del`` to empty this one, rather than create a - # fresh one (which would be my natural FP instinct). - del data[:] - if len(data): - compressed = compressor.compress(tostring(data)) - else: - compressed = '' - flushed = compressor.flush() - if len(compressed) or len(flushed): - # print >> sys.stderr, len(data), len(compressed), len(flushed) - write_chunk(outfile, 'IDAT', compressed + flushed) - # http://www.w3.org/TR/PNG/#11IEND - write_chunk(outfile, 'IEND') - return i+1 - - def write_array(self, outfile, pixels): - """ - Write an array in flat row flat pixel format as a PNG file on - the output file. See also :meth:`write` method. - """ - - if self.interlace: - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.array_scanlines(pixels)) - - def write_packed(self, outfile, rows): - """ - Write PNG file to `outfile`. The pixel data comes from `rows` - which should be in boxed row packed format. Each row should be - a sequence of packed bytes. - - Technically, this method does work for interlaced images but it - is best avoided. For interlaced images, the rows should be - presented in the order that they appear in the file. - - This method should not be used when the source image bit depth - is not one naturally supported by PNG; the bit depth should be - 1, 2, 4, 8, or 16. 
- """ - - if self.rescale: - raise Error("write_packed method not suitable for bit depth %d" % - self.rescale[0]) - return self.write_passes(outfile, rows, packed=True) - - def convert_pnm(self, infile, outfile): - """ - Convert a PNM file containing raw pixel data into a PNG file - with the parameters set in the writer object. Works for - (binary) PGM, PPM, and PAM formats. - """ - - if self.interlace: - pixels = array('B') - pixels.fromfile(infile, - (self.bitdepth/8) * self.color_planes * - self.width * self.height) - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.file_scanlines(infile)) - - def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile): - """ - Convert a PPM and PGM file containing raw pixel data into a - PNG outfile with the parameters set in the writer object. - """ - pixels = array('B') - pixels.fromfile(ppmfile, - (self.bitdepth/8) * self.color_planes * - self.width * self.height) - apixels = array('B') - apixels.fromfile(pgmfile, - (self.bitdepth/8) * - self.width * self.height) - pixels = interleave_planes(pixels, apixels, - (self.bitdepth/8) * self.color_planes, - (self.bitdepth/8)) - if self.interlace: - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.array_scanlines(pixels)) - - def file_scanlines(self, infile): - """ - Generates boxed rows in flat pixel format, from the input file - `infile`. It assumes that the input file is in a "Netpbm-like" - binary format, and is positioned at the beginning of the first - pixel. The number of pixels to read is taken from the image - dimensions (`width`, `height`, `planes`) and the number of bytes - per value is implied by the image `bitdepth`. 
- """ - - # Values per row - vpr = self.width * self.planes - row_bytes = vpr - if self.bitdepth > 8: - assert self.bitdepth == 16 - row_bytes *= 2 - fmt = '>%dH' % vpr - def line(): - return array('H', struct.unpack(fmt, infile.read(row_bytes))) - else: - def line(): - scanline = array('B', infile.read(row_bytes)) - return scanline - for y in range(self.height): - yield line() - - def array_scanlines(self, pixels): - """ - Generates boxed rows (flat pixels) from flat rows (flat pixels) - in an array. - """ - - # Values per row - vpr = self.width * self.planes - stop = 0 - for y in range(self.height): - start = stop - stop = start + vpr - yield pixels[start:stop] - - def array_scanlines_interlace(self, pixels): - """ - Generator for interlaced scanlines from an array. `pixels` is - the full source image in flat row flat pixel format. The - generator yields each scanline of the reduced passes in turn, in - boxed row flat pixel format. - """ - - # http://www.w3.org/TR/PNG/#8InterlaceMethods - # Array type. - fmt = 'BH'[self.bitdepth > 8] - # Value per row - vpr = self.width * self.planes - for xstart, ystart, xstep, ystep in _adam7: - if xstart >= self.width: - continue - # Pixels per row (of reduced image) - ppr = int(math.ceil((self.width-xstart)/float(xstep))) - # number of values in reduced image row. - row_len = ppr*self.planes - for y in range(ystart, self.height, ystep): - if xstep == 1: - offset = y * vpr - yield pixels[offset:offset+vpr] - else: - row = array(fmt) - # There's no easier way to set the length of an array - row.extend(pixels[0:row_len]) - offset = y * vpr + xstart * self.planes - end_offset = (y+1) * vpr - skip = self.planes * xstep - for i in range(self.planes): - row[i::self.planes] = \ - pixels[offset+i:end_offset:skip] - yield row - -def write_chunk(outfile, tag, data=strtobytes('')): - """ - Write a PNG chunk to the output file, including length and - checksum. 
- """ - - # http://www.w3.org/TR/PNG/#5Chunk-layout - outfile.write(struct.pack("!I", len(data))) - tag = strtobytes(tag) - outfile.write(tag) - outfile.write(data) - checksum = zlib.crc32(tag) - checksum = zlib.crc32(data, checksum) - checksum &= 2**32-1 - outfile.write(struct.pack("!I", checksum)) - -def write_chunks(out, chunks): - """Create a PNG file by writing out the chunks.""" - - out.write(_signature) - for chunk in chunks: - write_chunk(out, *chunk) - -def filter_scanline(type, line, fo, prev=None): - """Apply a scanline filter to a scanline. `type` specifies the - filter type (0 to 4); `line` specifies the current (unfiltered) - scanline as a sequence of bytes; `prev` specifies the previous - (unfiltered) scanline as a sequence of bytes. `fo` specifies the - filter offset; normally this is size of a pixel in bytes (the number - of bytes per sample times the number of channels), but when this is - < 1 (for bit depths < 8) then the filter offset is 1. - """ - - assert 0 <= type < 5 - - # The output array. Which, pathetically, we extend one-byte at a - # time (fortunately this is linear). - out = array('B', [type]) - - def sub(): - ai = -fo - for x in line: - if ai >= 0: - x = (x - line[ai]) & 0xff - out.append(x) - ai += 1 - def up(): - for i,x in enumerate(line): - x = (x - prev[i]) & 0xff - out.append(x) - def average(): - ai = -fo - for i,x in enumerate(line): - if ai >= 0: - x = (x - ((line[ai] + prev[i]) >> 1)) & 0xff - else: - x = (x - (prev[i] >> 1)) & 0xff - out.append(x) - ai += 1 - def paeth(): - # http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth - ai = -fo # also used for ci - for i,x in enumerate(line): - a = 0 - b = prev[i] - c = 0 - - if ai >= 0: - a = line[ai] - c = prev[ai] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: Pr = a - elif pb <= pc: Pr = b - else: Pr = c - - x = (x - Pr) & 0xff - out.append(x) - ai += 1 - - if not prev: - # We're on the first line. 
Some of the filters can be reduced - # to simpler cases which makes handling the line "off the top" - # of the image simpler. "up" becomes "none"; "paeth" becomes - # "left" (non-trivial, but true). "average" needs to be handled - # specially. - if type == 2: # "up" - return line # type = 0 - elif type == 3: - prev = [0]*len(line) - elif type == 4: # "paeth" - type = 1 - if type == 0: - out.extend(line) - elif type == 1: - sub() - elif type == 2: - up() - elif type == 3: - average() - else: # type == 4 - paeth() - return out - - -def from_array(a, mode=None, info={}): - """Create a PNG :class:`Image` object from a 2- or 3-dimensional array. - One application of this function is easy PIL-style saving: - ``png.from_array(pixels, 'L').save('foo.png')``. - - .. note : - - The use of the term *3-dimensional* is for marketing purposes - only. It doesn't actually work. Please bear with us. Meanwhile - enjoy the complimentary snacks (on request) and please use a - 2-dimensional array. - - Unless they are specified using the *info* parameter, the PNG's - height and width are taken from the array size. For a 3 dimensional - array the first axis is the height; the second axis is the width; - and the third axis is the channel number. Thus an RGB image that is - 16 pixels high and 8 wide will use an array that is 16x8x3. For 2 - dimensional arrays the first axis is the height, but the second axis - is ``width*channels``, so an RGB image that is 16 pixels high and 8 - wide will use a 2-dimensional array that is 16x24 (each row will be - 8*3==24 sample values). - - *mode* is a string that specifies the image colour format in a - PIL-style mode. It can be: - - ``'L'`` - greyscale (1 channel) - ``'LA'`` - greyscale with alpha (2 channel) - ``'RGB'`` - colour image (3 channel) - ``'RGBA'`` - colour image with alpha (4 channel) - - The mode string can also specify the bit depth (overriding how this - function normally derives the bit depth, see below). 
Appending - ``';16'`` to the mode will cause the PNG to be 16 bits per channel; - any decimal from 1 to 16 can be used to specify the bit depth. - - When a 2-dimensional array is used *mode* determines how many - channels the image has, and so allows the width to be derived from - the second array dimension. - - The array is expected to be a ``numpy`` array, but it can be any - suitable Python sequence. For example, a list of lists can be used: - ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. The exact - rules are: ``len(a)`` gives the first dimension, height; - ``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the - third dimension, unless an exception is raised in which case a - 2-dimensional array is assumed. It's slightly more complicated than - that because an iterator of rows can be used, and it all still - works. Using an iterator allows data to be streamed efficiently. - - The bit depth of the PNG is normally taken from the array element's - datatype (but if *mode* specifies a bitdepth then that is used - instead). The array element's datatype is determined in a way which - is supposed to work both for ``numpy`` arrays and for Python - ``array.array`` objects. A 1 byte datatype will give a bit depth of - 8, a 2 byte datatype will give a bit depth of 16. If the datatype - does not have an implicit size, for example it is a plain Python - list of lists, as above, then a default of 8 is used. - - The *info* parameter is a dictionary that can be used to specify - metadata (in the same style as the arguments to the - :class:``png.Writer`` class). For this function the keys that are - useful are: - - height - overrides the height derived from the array dimensions and allows - *a* to be an iterable. - width - overrides the width derived from the array dimensions. - bitdepth - overrides the bit depth derived from the element datatype (but - must match *mode* if that also specifies a bit depth). 
- - Generally anything specified in the - *info* dictionary will override any implicit choices that this - function would otherwise make, but must match any explicit ones. - For example, if the *info* dictionary has a ``greyscale`` key then - this must be true when mode is ``'L'`` or ``'LA'`` and false when - mode is ``'RGB'`` or ``'RGBA'``. - """ - - # We abuse the *info* parameter by modifying it. Take a copy here. - # (Also typechecks *info* to some extent). - info = dict(info) - - # Syntax check mode string. - bitdepth = None - try: - mode = mode.split(';') - if len(mode) not in (1,2): - raise Error() - if mode[0] not in ('L', 'LA', 'RGB', 'RGBA'): - raise Error() - if len(mode) == 2: - try: - bitdepth = int(mode[1]) - except: - raise Error() - except Error: - raise Error("mode string should be 'RGB' or 'L;16' or similar.") - mode = mode[0] - - # Get bitdepth from *mode* if possible. - if bitdepth: - if info.get('bitdepth') and bitdepth != info['bitdepth']: - raise Error("mode bitdepth (%d) should match info bitdepth (%d)." % - (bitdepth, info['bitdepth'])) - info['bitdepth'] = bitdepth - - # Fill in and/or check entries in *info*. - # Dimensions. - if 'size' in info: - # Check width, height, size all match where used. - for dimension,axis in [('width', 0), ('height', 1)]: - if dimension in info: - if info[dimension] != info['size'][axis]: - raise Error( - "info[%r] shhould match info['size'][%r]." % - (dimension, axis)) - info['width'],info['height'] = info['size'] - if 'height' not in info: - try: - l = len(a) - except: - raise Error( - "len(a) does not work, supply info['height'] instead.") - info['height'] = l - # Colour format. 
- if 'greyscale' in info: - if bool(info['greyscale']) != ('L' in mode): - raise Error("info['greyscale'] should match mode.") - info['greyscale'] = 'L' in mode - if 'alpha' in info: - if bool(info['alpha']) != ('A' in mode): - raise Error("info['alpha'] should match mode.") - info['alpha'] = 'A' in mode - - planes = len(mode) - if 'planes' in info: - if info['planes'] != planes: - raise Error("info['planes'] should match mode.") - - # In order to work out whether we the array is 2D or 3D we need its - # first row, which requires that we take a copy of its iterator. - # We may also need the first row to derive width and bitdepth. - a,t = itertools.tee(a) - row = t.next() - del t - try: - row[0][0] - threed = True - testelement = row[0] - except: - threed = False - testelement = row - if 'width' not in info: - if threed: - width = len(row) - else: - width = len(row) // planes - info['width'] = width - - # Not implemented yet - assert not threed - - if 'bitdepth' not in info: - try: - dtype = testelement.dtype - # goto the "else:" clause. Sorry. - except: - try: - # Try a Python array.array. - bitdepth = 8 * testelement.itemsize - except: - # We can't determine it from the array element's - # datatype, use a default of 8. - bitdepth = 8 - else: - # If we got here without exception, we now assume that - # the array is a numpy array. - if dtype.kind == 'b': - bitdepth = 1 - else: - bitdepth = 8 * dtype.itemsize - info['bitdepth'] = bitdepth - - for thing in 'width height bitdepth greyscale alpha'.split(): - assert thing in info - return Image(a, info) - -# So that refugee's from PIL feel more at home. Not documented. -fromarray = from_array - -class Image: - """A PNG image. - You can create an :class:`Image` object from an array of pixels by calling - :meth:`png.from_array`. It can be saved to disk with the - :meth:`save` method.""" - def __init__(self, rows, info): - """ - .. note :: - - The constructor is not public. Please do not call it. 
- """ - - self.rows = rows - self.info = info - - def save(self, file): - """Save the image to *file*. If *file* looks like an open file - descriptor then it is used, otherwise it is treated as a - filename and a fresh file is opened. - - In general, you can only call this method once; after it has - been called the first time and the PNG image has been saved, the - source data will have been streamed, and cannot be streamed - again. - """ - - w = Writer(**self.info) - - try: - file.write - def close(): pass - except: - file = open(file, 'wb') - def close(): file.close() - - try: - w.write(file, self.rows) - finally: - close() - -class _readable: - """ - A simple file-like interface for strings and arrays. - """ - - def __init__(self, buf): - self.buf = buf - self.offset = 0 - - def read(self, n): - r = self.buf[self.offset:self.offset+n] - if isarray(r): - r = r.tostring() - self.offset += n - return r - - -class Reader: - """ - PNG decoder in pure Python. - """ - - def __init__(self, _guess=None, **kw): - """ - Create a PNG decoder object. - - The constructor expects exactly one keyword argument. If you - supply a positional argument instead, it will guess the input - type. You can choose among the following keyword arguments: - - filename - Name of input file (a PNG file). - file - A file-like object (object with a read() method). - bytes - ``array`` or ``string`` with PNG data. - - """ - if ((_guess is not None and len(kw) != 0) or - (_guess is None and len(kw) != 1)): - raise TypeError("Reader() takes exactly 1 argument") - - # Will be the first 8 bytes, later on. See validate_signature. - self.signature = None - self.transparent = None - # A pair of (len,type) if a chunk has been read but its data and - # checksum have not (in other words the file position is just - # past the 4 bytes that specify the chunk type). See preamble - # method for how this is used. 
- self.atchunk = None - - if _guess is not None: - if isarray(_guess): - kw["bytes"] = _guess - elif isinstance(_guess, str): - kw["filename"] = _guess - elif hasattr(_guess, 'read'): - kw["file"] = _guess - - if "filename" in kw: - self.file = open(kw["filename"], "rb") - elif "file" in kw: - self.file = kw["file"] - elif "bytes" in kw: - self.file = _readable(kw["bytes"]) - else: - raise TypeError("expecting filename, file or bytes array") - - - def chunk(self, seek=None, lenient=False): - """ - Read the next PNG chunk from the input file; returns a - (*type*,*data*) tuple. *type* is the chunk's type as a string - (all PNG chunk types are 4 characters long). *data* is the - chunk's data content, as a string. - - If the optional `seek` argument is - specified then it will keep reading chunks until it either runs - out of file or finds the type specified by the argument. Note - that in general the order of chunks in PNGs is unspecified, so - using `seek` can cause you to miss chunks. - - If the optional `lenient` argument evaluates to True, - checksum failures will raise warnings rather than exceptions. - """ - - self.validate_signature() - - while True: - # http://www.w3.org/TR/PNG/#5Chunk-layout - if not self.atchunk: - self.atchunk = self.chunklentype() - length,type = self.atchunk - self.atchunk = None - data = self.file.read(length) - if len(data) != length: - raise ChunkError('Chunk %s too short for required %i octets.' - % (type, length)) - checksum = self.file.read(4) - if len(checksum) != 4: - raise ValueError('Chunk %s too short for checksum.', tag) - if seek and type != seek: - continue - verify = zlib.crc32(strtobytes(type)) - verify = zlib.crc32(data, verify) - # Whether the output from zlib.crc32 is signed or not varies - # according to hideous implementation details, see - # http://bugs.python.org/issue1202 . - # We coerce it to be positive here (in a way which works on - # Python 2.3 and older). 
- verify &= 2**32 - 1 - verify = struct.pack('!I', verify) - if checksum != verify: - # print repr(checksum) - (a, ) = struct.unpack('!I', checksum) - (b, ) = struct.unpack('!I', verify) - message = "Checksum error in %s chunk: 0x%08X != 0x%08X." % (type, a, b) - if lenient: - warnings.warn(message, RuntimeWarning) - else: - raise ChunkError(message) - return type, data - - def chunks(self): - """Return an iterator that will yield each chunk as a - (*chunktype*, *content*) pair. - """ - - while True: - t,v = self.chunk() - yield t,v - if t == 'IEND': - break - - def undo_filter(self, filter_type, scanline, previous): - """Undo the filter for a scanline. `scanline` is a sequence of - bytes that does not include the initial filter type byte. - `previous` is decoded previous scanline (for straightlaced - images this is the previous pixel row, but for interlaced - images, it is the previous scanline in the reduced image, which - in general is not the previous pixel row in the final image). - When there is no previous scanline (the first row of a - straightlaced image, or the first row in one of the passes in an - interlaced image), then this argument should be ``None``. - - The scanline will have the effects of filtering removed, and the - result will be returned as a fresh sequence of bytes. - """ - - # :todo: Would it be better to update scanline in place? - # Yes, with the Cython extension making the undo_filter fast, - # updating scanline inplace makes the code 3 times faster - # (reading 50 images of 800x800 went from 40s to 16s) - result = scanline - - if filter_type == 0: - return result - - if filter_type not in (1,2,3,4): - raise FormatError('Invalid PNG Filter Type.' - ' See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .') - - # Filter unit. The stride from one pixel to the corresponding - # byte from the previous previous. Normally this is the pixel - # size in bytes, but when this is smaller than 1, the previous - # byte is used instead. 
- fu = max(1, self.psize) - - # For the first line of a pass, synthesize a dummy previous - # line. An alternative approach would be to observe that on the - # first line 'up' is the same as 'null', 'paeth' is the same - # as 'sub', with only 'average' requiring any special case. - if not previous: - previous = array('B', [0]*len(scanline)) - - def sub(): - """Undo sub filter.""" - - ai = 0 - # Loops starts at index fu. Observe that the initial part - # of the result is already filled in correctly with - # scanline. - for i in range(fu, len(result)): - x = scanline[i] - a = result[ai] - result[i] = (x + a) & 0xff - ai += 1 - - def up(): - """Undo up filter.""" - - for i in range(len(result)): - x = scanline[i] - b = previous[i] - result[i] = (x + b) & 0xff - - def average(): - """Undo average filter.""" - - ai = -fu - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = 0 - else: - a = result[ai] - b = previous[i] - result[i] = (x + ((a + b) >> 1)) & 0xff - ai += 1 - - def paeth(): - """Undo Paeth filter.""" - - # Also used for ci. - ai = -fu - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = c = 0 - else: - a = result[ai] - c = previous[ai] - b = previous[i] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - pr = a - elif pb <= pc: - pr = b - else: - pr = c - result[i] = (x + pr) & 0xff - ai += 1 - - # Call appropriate filter algorithm. Note that 0 has already - # been dealt with. - (None, - pngfilters.undo_filter_sub, - pngfilters.undo_filter_up, - pngfilters.undo_filter_average, - pngfilters.undo_filter_paeth)[filter_type](fu, scanline, previous, result) - return result - - def deinterlace(self, raw): - """ - Read raw pixel data, undo filters, deinterlace, and flatten. - Return in flat row flat pixel format. 
- """ - - # print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," + - # " bpp=%s") % (self.width, self.height, self.planes, self.bps) - # Values per row (of the target image) - vpr = self.width * self.planes - - # Make a result array, and make it big enough. Interleaving - # writes to the output array randomly (well, not quite), so the - # entire output array must be in memory. - fmt = 'BH'[self.bitdepth > 8] - a = array(fmt, [0]*vpr*self.height) - source_offset = 0 - - for xstart, ystart, xstep, ystep in _adam7: - # print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % ( - # xstart, ystart, xstep, ystep) - if xstart >= self.width: - continue - # The previous (reconstructed) scanline. None at the - # beginning of a pass to indicate that there is no previous - # line. - recon = None - # Pixels per row (reduced pass image) - ppr = int(math.ceil((self.width-xstart)/float(xstep))) - # Row size in bytes for this pass. - row_size = int(math.ceil(self.psize * ppr)) - for y in range(ystart, self.height, ystep): - filter_type = raw[source_offset] - source_offset += 1 - scanline = raw[source_offset:source_offset+row_size] - source_offset += row_size - recon = self.undo_filter(filter_type, scanline, recon) - # Convert so that there is one element per pixel value - flat = self.serialtoflat(recon, ppr) - if xstep == 1: - assert xstart == 0 - offset = y * vpr - a[offset:offset+vpr] = flat - else: - offset = y * vpr + xstart * self.planes - end_offset = (y+1) * vpr - skip = self.planes * xstep - for i in range(self.planes): - a[offset+i:end_offset:skip] = \ - flat[i::self.planes] - return a - - def iterboxed(self, rows): - """Iterator that yields each scanline in boxed row flat pixel - format. `rows` should be an iterator that yields the bytes of - each row in turn. - """ - - def asvalues(raw): - """Convert a row of raw bytes into a flat row. 
Result may - or may not share with argument""" - - if self.bitdepth == 8: - return raw - if self.bitdepth == 16: - raw = tostring(raw) - return array('H', struct.unpack('!%dH' % (len(raw)//2), raw)) - assert self.bitdepth < 8 - width = self.width - # Samples per byte - spb = 8//self.bitdepth - out = array('B') - mask = 2**self.bitdepth - 1 - shifts = map(self.bitdepth.__mul__, reversed(range(spb))) - for o in raw: - out.extend(map(lambda i: mask&(o>>i), shifts)) - return out[:width] - - return map(asvalues, rows) - - def serialtoflat(self, bytes, width=None): - """Convert serial format (byte stream) pixel data to flat row - flat pixel. - """ - - if self.bitdepth == 8: - return bytes - if self.bitdepth == 16: - bytes = tostring(bytes) - return array('H', - struct.unpack('!%dH' % (len(bytes)//2), bytes)) - assert self.bitdepth < 8 - if width is None: - width = self.width - # Samples per byte - spb = 8//self.bitdepth - out = array('B') - mask = 2**self.bitdepth - 1 - shifts = map(self.bitdepth.__mul__, reversed(range(spb))) - l = width - for o in bytes: - out.extend([(mask&(o>>s)) for s in shifts][:l]) - l -= spb - if l <= 0: - l = width - return out - - def iterstraight(self, raw): - """Iterator that undoes the effect of filtering, and yields each - row in serialised format (as a sequence of bytes). Assumes input - is straightlaced. `raw` should be an iterable that yields the - raw bytes in chunks of arbitrary size.""" - - # length of row, in bytes - rb = self.row_bytes - a = array('B') - # The previous (reconstructed) scanline. None indicates first - # line of image. - recon = None - for some in raw: - a.extend(some) - while len(a) >= rb + 1: - filter_type = a[0] - scanline = a[1:rb+1] - del a[:rb+1] - recon = self.undo_filter(filter_type, scanline, recon) - yield recon - if len(a) != 0: - # :file:format We get here with a file format error: when the - # available bytes (after decompressing) do not pack into exact - # rows. 
- raise FormatError( - 'Wrong size for decompressed IDAT chunk.') - assert len(a) == 0 - - def validate_signature(self): - """If signature (header) has not been read then read and - validate it; otherwise do nothing. - """ - - if self.signature: - return - self.signature = self.file.read(8) - if self.signature != _signature: - raise FormatError("PNG file has invalid signature.") - - def preamble(self, lenient=False): - """ - Extract the image metadata by reading the initial part of the PNG - file up to the start of the ``IDAT`` chunk. All the chunks that - precede the ``IDAT`` chunk are read and either processed for - metadata or discarded. - - If the optional `lenient` argument evaluates to True, - checksum failures will raise warnings rather than exceptions. - """ - - self.validate_signature() - - while True: - if not self.atchunk: - self.atchunk = self.chunklentype() - if self.atchunk is None: - raise FormatError( - 'This PNG file has no IDAT chunks.') - if self.atchunk[1] == 'IDAT': - return - self.process_chunk(lenient=lenient) - - def chunklentype(self): - """Reads just enough of the input to determine the next - chunk's length and type, returned as a (*length*, *type*) pair - where *type* is a string. If there are no more chunks, ``None`` - is returned. - """ - - x = self.file.read(8) - if not x: - return None - if len(x) != 8: - raise FormatError( - 'End of file whilst reading chunk length and type.') - length,type = struct.unpack('!I4s', x) - type = bytestostr(type) - if length > 2**31-1: - raise FormatError('Chunk %s is too large: %d.' % (type,length)) - return length,type - - def process_chunk(self, lenient=False): - """Process the next chunk and its data. This only processes the - following chunk types, all others are ignored: ``IHDR``, - ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``. - - If the optional `lenient` argument evaluates to True, - checksum failures will raise warnings rather than exceptions. 
- """ - - type, data = self.chunk(lenient=lenient) - if type == 'IHDR': - # http://www.w3.org/TR/PNG/#11IHDR - if len(data) != 13: - raise FormatError('IHDR chunk has incorrect length.') - (self.width, self.height, self.bitdepth, self.color_type, - self.compression, self.filter, - self.interlace) = struct.unpack("!2I5B", data) - - # Check that the header specifies only valid combinations. - if self.bitdepth not in (1,2,4,8,16): - raise Error("invalid bit depth %d" % self.bitdepth) - if self.color_type not in (0,2,3,4,6): - raise Error("invalid colour type %d" % self.color_type) - # Check indexed (palettized) images have 8 or fewer bits - # per pixel; check only indexed or greyscale images have - # fewer than 8 bits per pixel. - if ((self.color_type & 1 and self.bitdepth > 8) or - (self.bitdepth < 8 and self.color_type not in (0,3))): - raise FormatError("Illegal combination of bit depth (%d)" - " and colour type (%d)." - " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." - % (self.bitdepth, self.color_type)) - if self.compression != 0: - raise Error("unknown compression method %d" % self.compression) - if self.filter != 0: - raise FormatError("Unknown filter method %d," - " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ." - % self.filter) - if self.interlace not in (0,1): - raise FormatError("Unknown interlace method %d," - " see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ." 
- % self.interlace) - - # Derived values - # http://www.w3.org/TR/PNG/#6Colour-values - colormap = bool(self.color_type & 1) - greyscale = not (self.color_type & 2) - alpha = bool(self.color_type & 4) - color_planes = (3,1)[greyscale or colormap] - planes = color_planes + alpha - - self.colormap = colormap - self.greyscale = greyscale - self.alpha = alpha - self.color_planes = color_planes - self.planes = planes - self.psize = float(self.bitdepth)/float(8) * planes - if int(self.psize) == self.psize: - self.psize = int(self.psize) - self.row_bytes = int(math.ceil(self.width * self.psize)) - # Stores PLTE chunk if present, and is used to check - # chunk ordering constraints. - self.plte = None - # Stores tRNS chunk if present, and is used to check chunk - # ordering constraints. - self.trns = None - # Stores sbit chunk if present. - self.sbit = None - elif type == 'PLTE': - # http://www.w3.org/TR/PNG/#11PLTE - if self.plte: - warnings.warn("Multiple PLTE chunks present.") - self.plte = data - if len(data) % 3 != 0: - raise FormatError( - "PLTE chunk's length should be a multiple of 3.") - if len(data) > (2**self.bitdepth)*3: - raise FormatError("PLTE chunk is too long.") - if len(data) == 0: - raise FormatError("Empty PLTE is not allowed.") - elif type == 'bKGD': - try: - if self.colormap: - if not self.plte: - warnings.warn( - "PLTE chunk is required before bKGD chunk.") - self.background = struct.unpack('B', data) - else: - self.background = struct.unpack("!%dH" % self.color_planes, - data) - except struct.error: - raise FormatError("bKGD chunk has incorrect length.") - elif type == 'tRNS': - # http://www.w3.org/TR/PNG/#11tRNS - self.trns = data - if self.colormap: - if not self.plte: - warnings.warn("PLTE chunk is required before tRNS chunk.") - else: - if len(data) > len(self.plte)/3: - # Was warning, but promoted to Error as it - # would otherwise cause pain later on. 
- raise FormatError("tRNS chunk is too long.") - else: - if self.alpha: - raise FormatError( - "tRNS chunk is not valid with colour type %d." % - self.color_type) - try: - self.transparent = \ - struct.unpack("!%dH" % self.color_planes, data) - except struct.error: - raise FormatError("tRNS chunk has incorrect length.") - elif type == 'gAMA': - try: - self.gamma = struct.unpack("!L", data)[0] / 100000.0 - except struct.error: - raise FormatError("gAMA chunk has incorrect length.") - elif type == 'sBIT': - self.sbit = data - if (self.colormap and len(data) != 3 or - not self.colormap and len(data) != self.planes): - raise FormatError("sBIT chunk has incorrect length.") - - def read(self, lenient=False): - """ - Read the PNG file and decode it. Returns (`width`, `height`, - `pixels`, `metadata`). - - May use excessive memory. - - `pixels` are returned in boxed row flat pixel format. - - If the optional `lenient` argument evaluates to True, - checksum failures will raise warnings rather than exceptions. - """ - - def iteridat(): - """Iterator that yields all the ``IDAT`` chunks as strings.""" - while True: - try: - type, data = self.chunk(lenient=lenient) - except ValueError as e: - raise ChunkError(e.args[0]) - if type == 'IEND': - # http://www.w3.org/TR/PNG/#11IEND - break - if type != 'IDAT': - continue - # type == 'IDAT' - # http://www.w3.org/TR/PNG/#11IDAT - if self.colormap and not self.plte: - warnings.warn("PLTE chunk is required before IDAT chunk") - yield data - - def iterdecomp(idat): - """Iterator that yields decompressed strings. `idat` should - be an iterator that yields the ``IDAT`` chunk data. - """ - - # Currently, with no max_length paramter to decompress, this - # routine will do one yield per IDAT chunk. So not very - # incremental. - d = zlib.decompressobj() - # Each IDAT chunk is passed to the decompressor, then any - # remaining state is decompressed out. - for data in idat: - # :todo: add a max_length argument here to limit output - # size. 
- yield array('B', d.decompress(data)) - yield array('B', d.flush()) - - self.preamble(lenient=lenient) - raw = iterdecomp(iteridat()) - - if self.interlace: - raw = array('B', itertools.chain(*raw)) - arraycode = 'BH'[self.bitdepth>8] - # Like :meth:`group` but producing an array.array object for - # each row. - pixels = itertools.imap(lambda *row: array(arraycode, row), - *[iter(self.deinterlace(raw))]*self.width*self.planes) - else: - pixels = self.iterboxed(self.iterstraight(raw)) - meta = dict() - for attr in 'greyscale alpha planes bitdepth interlace'.split(): - meta[attr] = getattr(self, attr) - meta['size'] = (self.width, self.height) - for attr in 'gamma transparent background'.split(): - a = getattr(self, attr, None) - if a is not None: - meta[attr] = a - if self.plte: - meta['palette'] = self.palette() - return self.width, self.height, pixels, meta - - - def read_flat(self): - """ - Read a PNG file and decode it into flat row flat pixel format. - Returns (*width*, *height*, *pixels*, *metadata*). - - May use excessive memory. - - `pixels` are returned in flat row flat pixel format. - - See also the :meth:`read` method which returns pixels in the - more stream-friendly boxed row flat pixel format. - """ - - x, y, pixel, meta = self.read() - arraycode = 'BH'[meta['bitdepth']>8] - pixel = array(arraycode, itertools.chain(*pixel)) - return x, y, pixel, meta - - def palette(self, alpha='natural'): - """Returns a palette that is a sequence of 3-tuples or 4-tuples, - synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These - chunks should have already been processed (for example, by - calling the :meth:`preamble` method). All the tuples are the - same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when - there is a ``tRNS`` chunk. Assumes that the image is colour type - 3 and therefore a ``PLTE`` chunk is required. - - If the `alpha` argument is ``'force'`` then an alpha channel is - always added, forcing the result to be a sequence of 4-tuples. 
- """ - - if not self.plte: - raise FormatError( - "Required PLTE chunk is missing in colour type 3 image.") - plte = group(array('B', self.plte), 3) - if self.trns or alpha == 'force': - trns = array('B', self.trns or '') - trns.extend([255]*(len(plte)-len(trns))) - plte = map(operator.add, plte, group(trns, 1)) - return plte - - def asDirect(self): - """Returns the image data as a direct representation of an - ``x * y * planes`` array. This method is intended to remove the - need for callers to deal with palettes and transparency - themselves. Images with a palette (colour type 3) - are converted to RGB or RGBA; images with transparency (a - ``tRNS`` chunk) are converted to LA or RGBA as appropriate. - When returned in this format the pixel values represent the - colour value directly without needing to refer to palettes or - transparency information. - - Like the :meth:`read` method this method returns a 4-tuple: - - (*width*, *height*, *pixels*, *meta*) - - This method normally returns pixel values with the bit depth - they have in the source image, but when the source PNG has an - ``sBIT`` chunk it is inspected and can reduce the bit depth of - the result pixels; pixel values will be reduced according to - the bit depth specified in the ``sBIT`` chunk (PNG nerds should - note a single result bit depth is used for all channels; the - maximum of the ones specified in the ``sBIT`` chunk. An RGB565 - image will be rescaled to 6-bit RGB666). - - The *meta* dictionary that is returned reflects the `direct` - format and not the original source image. For example, an RGB - source image with a ``tRNS`` chunk to represent a transparent - colour, will have ``planes=3`` and ``alpha=False`` for the - source image, but the *meta* dictionary returned by this method - will have ``planes=4`` and ``alpha=True`` because an alpha - channel is synthesized and added. - - *pixels* is the pixel data in boxed row flat pixel format (just - like the :meth:`read` method). 
- - All the other aspects of the image data are not changed. - """ - - self.preamble() - - # Simple case, no conversion necessary. - if not self.colormap and not self.trns and not self.sbit: - return self.read() - - x,y,pixels,meta = self.read() - - if self.colormap: - meta['colormap'] = False - meta['alpha'] = bool(self.trns) - meta['bitdepth'] = 8 - meta['planes'] = 3 + bool(self.trns) - plte = self.palette() - def iterpal(pixels): - for row in pixels: - row = map(plte.__getitem__, row) - yield array('B', itertools.chain(*row)) - pixels = iterpal(pixels) - elif self.trns: - # It would be nice if there was some reasonable way of doing - # this without generating a whole load of intermediate tuples. - # But tuples does seem like the easiest way, with no other way - # clearly much simpler or much faster. (Actually, the L to LA - # conversion could perhaps go faster (all those 1-tuples!), but - # I still wonder whether the code proliferation is worth it) - it = self.transparent - maxval = 2**meta['bitdepth']-1 - planes = meta['planes'] - meta['alpha'] = True - meta['planes'] += 1 - typecode = 'BH'[meta['bitdepth']>8] - def itertrns(pixels): - for row in pixels: - # For each row we group it into pixels, then form a - # characterisation vector that says whether each pixel - # is opaque or not. Then we convert True/False to - # 0/maxval (by multiplication), and add it as the extra - # channel. 
- row = group(row, planes) - opa = map(it.__ne__, row) - opa = map(maxval.__mul__, opa) - opa = zip(opa) # convert to 1-tuples - yield array(typecode, - itertools.chain(*map(operator.add, row, opa))) - pixels = itertrns(pixels) - targetbitdepth = None - if self.sbit: - sbit = struct.unpack('%dB' % len(self.sbit), self.sbit) - targetbitdepth = max(sbit) - if targetbitdepth > meta['bitdepth']: - raise Error('sBIT chunk %r exceeds bitdepth %d' % - (sbit,self.bitdepth)) - if min(sbit) <= 0: - raise Error('sBIT chunk %r has a 0-entry' % sbit) - if targetbitdepth == meta['bitdepth']: - targetbitdepth = None - if targetbitdepth: - shift = meta['bitdepth'] - targetbitdepth - meta['bitdepth'] = targetbitdepth - def itershift(pixels): - for row in pixels: - yield map(shift.__rrshift__, row) - pixels = itershift(pixels) - return x,y,pixels,meta - - def asFloat(self, maxval=1.0): - """Return image pixels as per :meth:`asDirect` method, but scale - all pixel values to be floating point values between 0.0 and - *maxval*. - """ - - x,y,pixels,info = self.asDirect() - sourcemaxval = 2**info['bitdepth']-1 - del info['bitdepth'] - info['maxval'] = float(maxval) - factor = float(maxval)/float(sourcemaxval) - def iterfloat(): - for row in pixels: - yield map(factor.__mul__, row) - return x,y,iterfloat(),info - - def _as_rescale(self, get, targetbitdepth): - """Helper used by :meth:`asRGB8` and :meth:`asRGBA8`.""" - - width,height,pixels,meta = get() - maxval = 2**meta['bitdepth'] - 1 - targetmaxval = 2**targetbitdepth - 1 - factor = float(targetmaxval) / float(maxval) - meta['bitdepth'] = targetbitdepth - def iterscale(): - for row in pixels: - yield map(lambda x: int(round(x*factor)), row) - if maxval == targetmaxval: - return width, height, pixels, meta - else: - return width, height, iterscale(), meta - - def asRGB8(self): - """Return the image data as an RGB pixels with 8-bits per - sample. 
This is like the :meth:`asRGB` method except that - this method additionally rescales the values so that they - are all between 0 and 255 (8-bit). In the case where the - source image has a bit depth < 8 the transformation preserves - all the information; where the source image has bit depth - > 8, then rescaling to 8-bit values loses precision. No - dithering is performed. Like :meth:`asRGB`, an alpha channel - in the source image will raise an exception. - - This function returns a 4-tuple: - (*width*, *height*, *pixels*, *metadata*). - *width*, *height*, *metadata* are as per the :meth:`read` method. - - *pixels* is the pixel data in boxed row flat pixel format. - """ - - return self._as_rescale(self.asRGB, 8) - - def asRGBA8(self): - """Return the image data as RGBA pixels with 8-bits per - sample. This method is similar to :meth:`asRGB8` and - :meth:`asRGBA`: The result pixels have an alpha channel, *and* - values are rescaled to the range 0 to 255. The alpha channel is - synthesized if necessary (with a small speed penalty). - """ - - return self._as_rescale(self.asRGBA, 8) - - def asRGB(self): - """Return image as RGB pixels. RGB colour images are passed - through unchanged; greyscales are expanded into RGB - triplets (there is a small speed overhead for doing this). - - An alpha channel in the source image will raise an - exception. - - The return values are as for the :meth:`read` method - except that the *metadata* reflect the returned pixels, not the - source image. In particular, for this method - ``metadata['greyscale']`` will be ``False``. 
- """ - - width,height,pixels,meta = self.asDirect() - if meta['alpha']: - raise Error("will not convert image with alpha channel to RGB") - if not meta['greyscale']: - return width,height,pixels,meta - meta['greyscale'] = False - typecode = 'BH'[meta['bitdepth'] > 8] - def iterrgb(): - for row in pixels: - a = array(typecode, [0]) * 3 * width - for i in range(3): - a[i::3] = row - yield a - return width,height,iterrgb(),meta - - def asRGBA(self): - """Return image as RGBA pixels. Greyscales are expanded into - RGB triplets; an alpha channel is synthesized if necessary. - The return values are as for the :meth:`read` method - except that the *metadata* reflect the returned pixels, not the - source image. In particular, for this method - ``metadata['greyscale']`` will be ``False``, and - ``metadata['alpha']`` will be ``True``. - """ - - width,height,pixels,meta = self.asDirect() - if meta['alpha'] and not meta['greyscale']: - return width,height,pixels,meta - typecode = 'BH'[meta['bitdepth'] > 8] - maxval = 2**meta['bitdepth'] - 1 - maxbuffer = struct.pack('=' + typecode, maxval) * 4 * width - def newarray(): - return array(typecode, maxbuffer) - - if meta['alpha'] and meta['greyscale']: - # LA to RGBA - def convert(): - for row in pixels: - # Create a fresh target row, then copy L channel - # into first three target channels, and A channel - # into fourth channel. 
- a = newarray() - pngfilters.convert_la_to_rgba(row, a) - yield a - elif meta['greyscale']: - # L to RGBA - def convert(): - for row in pixels: - a = newarray() - pngfilters.convert_l_to_rgba(row, a) - yield a - else: - assert not meta['alpha'] and not meta['greyscale'] - # RGB to RGBA - def convert(): - for row in pixels: - a = newarray() - pngfilters.convert_rgb_to_rgba(row, a) - yield a - meta['alpha'] = True - meta['greyscale'] = False - return width,height,convert(),meta - - -# === Legacy Version Support === - -# :pyver:old: PyPNG works on Python versions 2.3 and 2.2, but not -# without some awkward problems. Really PyPNG works on Python 2.4 (and -# above); it works on Pythons 2.3 and 2.2 by virtue of fixing up -# problems here. It's a bit ugly (which is why it's hidden down here). -# -# Generally the strategy is one of pretending that we're running on -# Python 2.4 (or above), and patching up the library support on earlier -# versions so that it looks enough like Python 2.4. When it comes to -# Python 2.2 there is one thing we cannot patch: extended slices -# http://www.python.org/doc/2.3/whatsnew/section-slices.html. -# Instead we simply declare that features that are implemented using -# extended slices will not work on Python 2.2. -# -# In order to work on Python 2.3 we fix up a recurring annoyance involving -# the array type. In Python 2.3 an array cannot be initialised with an -# array, and it cannot be extended with a list (or other sequence). -# Both of those are repeated issues in the code. Whilst I would not -# normally tolerate this sort of behaviour, here we "shim" a replacement -# for array into place (and hope no-ones notices). You never read this. -# -# In an amusing case of warty hacks on top of warty hacks... the array -# shimming we try and do only works on Python 2.3 and above (you can't -# subclass array.array in Python 2.2). So to get it working on Python -# 2.2 we go for something much simpler and (probably) way slower. 
-try: - array('B').extend([]) - array('B', array('B')) -except: - # Expect to get here on Python 2.3 - try: - class _array_shim(array): - true_array = array - def __new__(cls, typecode, init=None): - super_new = super(_array_shim, cls).__new__ - it = super_new(cls, typecode) - if init is None: - return it - it.extend(init) - return it - def extend(self, extension): - super_extend = super(_array_shim, self).extend - if isinstance(extension, self.true_array): - return super_extend(extension) - if not isinstance(extension, (list, str)): - # Convert to list. Allows iterators to work. - extension = list(extension) - return super_extend(self.true_array(self.typecode, extension)) - array = _array_shim - except: - # Expect to get here on Python 2.2 - def array(typecode, init=()): - if type(init) == str: - return map(ord, init) - return list(init) - -# Further hacks to get it limping along on Python 2.2 -try: - enumerate -except: - def enumerate(seq): - i=0 - for x in seq: - yield i,x - i += 1 - -try: - reversed -except: - def reversed(l): - l = list(l) - l.reverse() - for x in l: - yield x - -try: - itertools -except: - class _dummy_itertools: - pass - itertools = _dummy_itertools() - def _itertools_imap(f, seq): - for x in seq: - yield f(x) - itertools.imap = _itertools_imap - def _itertools_chain(*iterables): - for it in iterables: - for element in it: - yield element - itertools.chain = _itertools_chain - - -# === Support for users without Cython === - -try: - pngfilters -except: - class pngfilters(object): - def undo_filter_sub(filter_unit, scanline, previous, result): - """Undo sub filter.""" - - ai = 0 - # Loops starts at index fu. Observe that the initial part - # of the result is already filled in correctly with - # scanline. 
- for i in range(filter_unit, len(result)): - x = scanline[i] - a = result[ai] - result[i] = (x + a) & 0xff - ai += 1 - undo_filter_sub = staticmethod(undo_filter_sub) - - def undo_filter_up(filter_unit, scanline, previous, result): - """Undo up filter.""" - - for i in range(len(result)): - x = scanline[i] - b = previous[i] - result[i] = (x + b) & 0xff - undo_filter_up = staticmethod(undo_filter_up) - - def undo_filter_average(filter_unit, scanline, previous, result): - """Undo up filter.""" - - ai = -filter_unit - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = 0 - else: - a = result[ai] - b = previous[i] - result[i] = (x + ((a + b) >> 1)) & 0xff - ai += 1 - undo_filter_average = staticmethod(undo_filter_average) - - def undo_filter_paeth(filter_unit, scanline, previous, result): - """Undo Paeth filter.""" - - # Also used for ci. - ai = -filter_unit - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = c = 0 - else: - a = result[ai] - c = previous[ai] - b = previous[i] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - pr = a - elif pb <= pc: - pr = b - else: - pr = c - result[i] = (x + pr) & 0xff - ai += 1 - undo_filter_paeth = staticmethod(undo_filter_paeth) - - def convert_la_to_rgba(row, result): - for i in range(3): - result[i::4] = row[0::2] - result[3::4] = row[1::2] - convert_la_to_rgba = staticmethod(convert_la_to_rgba) - - def convert_l_to_rgba(row, result): - """Convert a grayscale image to RGBA. This method assumes the alpha - channel in result is already correctly initialized.""" - for i in range(3): - result[i::4] = row - convert_l_to_rgba = staticmethod(convert_l_to_rgba) - - def convert_rgb_to_rgba(row, result): - """Convert an RGB image to RGBA. 
This method assumes the alpha - channel in result is already correctly initialized.""" - for i in range(3): - result[i::4] = row[i::3] - convert_rgb_to_rgba = staticmethod(convert_rgb_to_rgba) - - -# === Internal Test Support === - -# This section comprises the tests that are internally validated (as -# opposed to tests which produce output files that are externally -# validated). Primarily they are unittests. - -# Note that it is difficult to internally validate the results of -# writing a PNG file. The only thing we can do is read it back in -# again, which merely checks consistency, not that the PNG file we -# produce is valid. - -# Run the tests from the command line: -# python -c 'import png;png.test()' - -# (For an in-memory binary file IO object) We use BytesIO where -# available, otherwise we use StringIO, but name it BytesIO. -try: - from io import BytesIO -except: - from StringIO import StringIO as BytesIO -import tempfile -# http://www.python.org/doc/2.4.4/lib/module-unittest.html -import unittest - - -def test(): - unittest.main(__name__) - -def topngbytes(name, rows, x, y, **k): - """Convenience function for creating a PNG file "in memory" as a - string. Creates a :class:`Writer` instance using the keyword arguments, - then passes `rows` to its :meth:`Writer.write` method. The resulting - PNG file is returned as a string. `name` is used to identify the file for - debugging. - """ - - import os - - print(name) - f = BytesIO() - w = Writer(x, y, **k) - w.write(f, rows) - if os.environ.get('PYPNG_TEST_TMP'): - w = open(name, 'wb') - w.write(f.getvalue()) - w.close() - return f.getvalue() - -def testWithIO(inp, out, f): - """Calls the function `f` with ``sys.stdin`` changed to `inp` - and ``sys.stdout`` changed to `out`. They are restored when `f` - returns. This function returns whatever `f` returns. 
- """ - - import os - - try: - oldin,sys.stdin = sys.stdin,inp - oldout,sys.stdout = sys.stdout,out - x = f() - finally: - sys.stdin = oldin - sys.stdout = oldout - if os.environ.get('PYPNG_TEST_TMP') and hasattr(out,'getvalue'): - name = mycallersname() - if name: - w = open(name+'.png', 'wb') - w.write(out.getvalue()) - w.close() - return x - -def mycallersname(): - """Returns the name of the caller of the caller of this function - (hence the name of the caller of the function in which - "mycallersname()" textually appears). Returns None if this cannot - be determined.""" - - # http://docs.python.org/library/inspect.html#the-interpreter-stack - import inspect - - frame = inspect.currentframe() - if not frame: - return None - frame_,filename_,lineno_,funname,linelist_,listi_ = ( - inspect.getouterframes(frame)[2]) - return funname - -def seqtobytes(s): - """Convert a sequence of integers to a *bytes* instance. Good for - plastering over Python 2 / Python 3 cracks. - """ - - return strtobytes(''.join(chr(x) for x in s)) - -class Test(unittest.TestCase): - # This member is used by the superclass. If we don't define a new - # class here then when we use self.assertRaises() and the PyPNG code - # raises an assertion then we get no proper traceback. I can't work - # out why, but defining a new class here means we get a proper - # traceback. - class failureException(Exception): - pass - - def helperLN(self, n): - mask = (1 << n) - 1 - # Use small chunk_limit so that multiple chunk writing is - # tested. Making it a test for Issue 20. 
- w = Writer(15, 17, greyscale=True, bitdepth=n, chunk_limit=99) - f = BytesIO() - w.write_array(f, array('B', map(mask.__and__, range(1, 256)))) - r = Reader(bytes=f.getvalue()) - x,y,pixels,meta = r.read() - self.assertEqual(x, 15) - self.assertEqual(y, 17) - self.assertEqual(list(itertools.chain(*pixels)), - map(mask.__and__, range(1,256))) - def testL8(self): - return self.helperLN(8) - def testL4(self): - return self.helperLN(4) - def testL2(self): - "Also tests asRGB8." - w = Writer(1, 4, greyscale=True, bitdepth=2) - f = BytesIO() - w.write_array(f, array('B', range(4))) - r = Reader(bytes=f.getvalue()) - x,y,pixels,meta = r.asRGB8() - self.assertEqual(x, 1) - self.assertEqual(y, 4) - for i,row in enumerate(pixels): - self.assertEqual(len(row), 3) - self.assertEqual(list(row), [0x55*i]*3) - def testP2(self): - "2-bit palette." - a = (255,255,255) - b = (200,120,120) - c = (50,99,50) - w = Writer(1, 4, bitdepth=2, palette=[a,b,c]) - f = BytesIO() - w.write_array(f, array('B', (0,1,1,2))) - r = Reader(bytes=f.getvalue()) - x,y,pixels,meta = r.asRGB8() - self.assertEqual(x, 1) - self.assertEqual(y, 4) - self.assertEqual(map(list, pixels), map(list, [a, b, b, c])) - def testPtrns(self): - "Test colour type 3 and tRNS chunk (and 4-bit palette)." 
- a = (50,99,50,50) - b = (200,120,120,80) - c = (255,255,255) - d = (200,120,120) - e = (50,99,50) - w = Writer(3, 3, bitdepth=4, palette=[a,b,c,d,e]) - f = BytesIO() - w.write_array(f, array('B', (4, 3, 2, 3, 2, 0, 2, 0, 1))) - r = Reader(bytes=f.getvalue()) - x,y,pixels,meta = r.asRGBA8() - self.assertEqual(x, 3) - self.assertEqual(y, 3) - c = c+(255,) - d = d+(255,) - e = e+(255,) - boxed = [(e,d,c),(d,c,a),(c,a,b)] - flat = map(lambda row: itertools.chain(*row), boxed) - self.assertEqual(map(list, pixels), map(list, flat)) - def testRGBtoRGBA(self): - "asRGBA8() on colour type 2 source.""" - # Test for Issue 26 - r = Reader(bytes=_pngsuite['basn2c08']) - x,y,pixels,meta = r.asRGBA8() - # Test the pixels at row 9 columns 0 and 1. - row9 = list(pixels)[9] - self.assertEqual(list(row9[0:8]), - [0xff, 0xdf, 0xff, 0xff, 0xff, 0xde, 0xff, 0xff]) - def testLtoRGBA(self): - "asRGBA() on grey source.""" - # Test for Issue 60 - r = Reader(bytes=_pngsuite['basi0g08']) - x,y,pixels,meta = r.asRGBA() - row9 = list(list(pixels)[9]) - self.assertEqual(row9[0:8], - [222, 222, 222, 255, 221, 221, 221, 255]) - def testCtrns(self): - "Test colour type 2 and tRNS chunk." - # Test for Issue 25 - r = Reader(bytes=_pngsuite['tbrn2c08']) - x,y,pixels,meta = r.asRGBA8() - # I just happen to know that the first pixel is transparent. - # In particular it should be #7f7f7f00 - row0 = list(pixels)[0] - self.assertEqual(tuple(row0[0:4]), (0x7f, 0x7f, 0x7f, 0x00)) - def testAdam7read(self): - """Adam7 interlace reading. 
- Specifically, test that for images in the PngSuite that - have both an interlaced and straightlaced pair that both - images from the pair produce the same array of pixels.""" - for candidate in _pngsuite: - if not candidate.startswith('basn'): - continue - candi = candidate.replace('n', 'i') - if candi not in _pngsuite: - continue - print('adam7 read', candidate) - straight = Reader(bytes=_pngsuite[candidate]) - adam7 = Reader(bytes=_pngsuite[candi]) - # Just compare the pixels. Ignore x,y (because they're - # likely to be correct?); metadata is ignored because the - # "interlace" member differs. Lame. - straight = straight.read()[2] - adam7 = adam7.read()[2] - self.assertEqual(map(list, straight), map(list, adam7)) - def testAdam7write(self): - """Adam7 interlace writing. - For each test image in the PngSuite, write an interlaced - and a straightlaced version. Decode both, and compare results. - """ - # Not such a great test, because the only way we can check what - # we have written is to read it back again. - - for name,bytes in _pngsuite.items(): - # Only certain colour types supported for this test. 
- if name[3:5] not in ['n0', 'n2', 'n4', 'n6']: - continue - it = Reader(bytes=bytes) - x,y,pixels,meta = it.read() - pngi = topngbytes('adam7wn'+name+'.png', pixels, - x=x, y=y, bitdepth=it.bitdepth, - greyscale=it.greyscale, alpha=it.alpha, - transparent=it.transparent, - interlace=False) - x,y,ps,meta = Reader(bytes=pngi).read() - it = Reader(bytes=bytes) - x,y,pixels,meta = it.read() - pngs = topngbytes('adam7wi'+name+'.png', pixels, - x=x, y=y, bitdepth=it.bitdepth, - greyscale=it.greyscale, alpha=it.alpha, - transparent=it.transparent, - interlace=True) - x,y,pi,meta = Reader(bytes=pngs).read() - self.assertEqual(map(list, ps), map(list, pi)) - def testPGMin(self): - """Test that the command line tool can read PGM files.""" - def do(): - return _main(['testPGMin']) - s = BytesIO() - s.write(strtobytes('P5 2 2 3\n')) - s.write(strtobytes('\x00\x01\x02\x03')) - s.flush() - s.seek(0) - o = BytesIO() - testWithIO(s, o, do) - r = Reader(bytes=o.getvalue()) - x,y,pixels,meta = r.read() - self.assertTrue(r.greyscale) - self.assertEqual(r.bitdepth, 2) - def testPAMin(self): - """Test that the command line tool can read PAM file.""" - def do(): - return _main(['testPAMin']) - s = BytesIO() - s.write(strtobytes('P7\nWIDTH 3\nHEIGHT 1\nDEPTH 4\nMAXVAL 255\n' - 'TUPLTYPE RGB_ALPHA\nENDHDR\n')) - # The pixels in flat row flat pixel format - flat = [255,0,0,255, 0,255,0,120, 0,0,255,30] - asbytes = seqtobytes(flat) - s.write(asbytes) - s.flush() - s.seek(0) - o = BytesIO() - testWithIO(s, o, do) - r = Reader(bytes=o.getvalue()) - x,y,pixels,meta = r.read() - self.assertTrue(r.alpha) - self.assertTrue(not r.greyscale) - self.assertEqual(list(itertools.chain(*pixels)), flat) - def testLA4(self): - """Create an LA image with bitdepth 4.""" - bytes = topngbytes('la4.png', [[5, 12]], 1, 1, - greyscale=True, alpha=True, bitdepth=4) - sbit = Reader(bytes=bytes).chunk('sBIT')[1] - self.assertEqual(sbit, strtobytes('\x04\x04')) - def testPal(self): - """Test that a palette PNG 
returns the palette in info.""" - r = Reader(bytes=_pngsuite['basn3p04']) - x,y,pixels,info = r.read() - self.assertEqual(x, 32) - self.assertEqual(y, 32) - self.assertTrue('palette' in info) - def testPalWrite(self): - """Test metadata for paletted PNG can be passed from one PNG - to another.""" - r = Reader(bytes=_pngsuite['basn3p04']) - x,y,pixels,info = r.read() - w = Writer(**info) - o = BytesIO() - w.write(o, pixels) - o.flush() - o.seek(0) - r = Reader(file=o) - _,_,_,again_info = r.read() - # Same palette - self.assertEqual(again_info['palette'], info['palette']) - def testPalExpand(self): - """Test that bitdepth can be used to fiddle with pallete image.""" - r = Reader(bytes=_pngsuite['basn3p04']) - x,y,pixels,info = r.read() - pixels = [list(row) for row in pixels] - info['bitdepth'] = 8 - w = Writer(**info) - o = BytesIO() - w.write(o, pixels) - o.flush() - o.seek(0) - r = Reader(file=o) - _,_,again_pixels,again_info = r.read() - # Same pixels - again_pixels = [list(row) for row in again_pixels] - self.assertEqual(again_pixels, pixels) - - def testPNMsbit(self): - """Test that PNM files can generates sBIT chunk.""" - def do(): - return _main(['testPNMsbit']) - s = BytesIO() - s.write(strtobytes('P6 8 1 1\n')) - for pixel in range(8): - s.write(struct.pack('>sys.stderr, "skipping numpy test" - return - - rows = [map(numpy.uint16, range(0,0x10000,0x5555))] - b = topngbytes('numpyuint16.png', rows, 4, 1, - greyscale=True, alpha=False, bitdepth=16) - def testNumpyuint8(self): - """numpy uint8.""" - - try: - import numpy - except ImportError: - print >>sys.stderr, "skipping numpy test" - return - - rows = [map(numpy.uint8, range(0,0x100,0x55))] - b = topngbytes('numpyuint8.png', rows, 4, 1, - greyscale=True, alpha=False, bitdepth=8) - def testNumpybool(self): - """numpy bool.""" - - try: - import numpy - except ImportError: - print >>sys.stderr, "skipping numpy test" - return - - rows = [map(numpy.bool, [0,1])] - b = topngbytes('numpybool.png', rows, 2, 1, - 
greyscale=True, alpha=False, bitdepth=1) - def testNumpyarray(self): - """numpy array.""" - try: - import numpy - except ImportError: - print >>sys.stderr, "skipping numpy test" - return - - pixels = numpy.array([[0,0x5555],[0x5555,0xaaaa]], numpy.uint16) - img = from_array(pixels, 'L') - img.save('testnumpyL16.png') - - def paeth(self, x, a, b, c): - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - pr = a - elif pb <= pc: - pr = b - else: - pr = c - return x - pr - - # test filters and unfilters - def testFilterScanlineFirstLine(self): - fo = 3 # bytes per pixel - line = [30, 31, 32, 230, 231, 232] - out = filter_scanline(0, line, fo, None) # none - self.assertEqual(list(out), [0, 30, 31, 32, 230, 231, 232]) - out = filter_scanline(1, line, fo, None) # sub - self.assertEqual(list(out), [1, 30, 31, 32, 200, 200, 200]) - out = filter_scanline(2, line, fo, None) # up - # TODO: All filtered scanlines start with a byte indicating the filter - # algorithm, except "up". Is this a bug? Should the expected output - # start with 2 here? 
- self.assertEqual(list(out), [30, 31, 32, 230, 231, 232]) - out = filter_scanline(3, line, fo, None) # average - self.assertEqual(list(out), [3, 30, 31, 32, 215, 216, 216]) - out = filter_scanline(4, line, fo, None) # paeth - self.assertEqual(list(out), [ - 4, self.paeth(30, 0, 0, 0), self.paeth(31, 0, 0, 0), - self.paeth(32, 0, 0, 0), self.paeth(230, 30, 0, 0), - self.paeth(231, 31, 0, 0), self.paeth(232, 32, 0, 0) - ]) - def testFilterScanline(self): - prev = [20, 21, 22, 210, 211, 212] - line = [30, 32, 34, 230, 233, 236] - fo = 3 - out = filter_scanline(0, line, fo, prev) # none - self.assertEqual(list(out), [0, 30, 32, 34, 230, 233, 236]) - out = filter_scanline(1, line, fo, prev) # sub - self.assertEqual(list(out), [1, 30, 32, 34, 200, 201, 202]) - out = filter_scanline(2, line, fo, prev) # up - self.assertEqual(list(out), [2, 10, 11, 12, 20, 22, 24]) - out = filter_scanline(3, line, fo, prev) # average - self.assertEqual(list(out), [3, 20, 22, 23, 110, 112, 113]) - out = filter_scanline(4, line, fo, prev) # paeth - self.assertEqual(list(out), [ - 4, self.paeth(30, 0, 20, 0), self.paeth(32, 0, 21, 0), - self.paeth(34, 0, 22, 0), self.paeth(230, 30, 210, 20), - self.paeth(233, 32, 211, 21), self.paeth(236, 34, 212, 22) - ]) - def testUnfilterScanline(self): - reader = Reader(bytes='') - reader.psize = 3 - scanprev = array('B', [20, 21, 22, 210, 211, 212]) - scanline = array('B', [30, 32, 34, 230, 233, 236]) - def cp(a): - return array('B', a) - - out = reader.undo_filter(0, cp(scanline), cp(scanprev)) - self.assertEqual(list(out), list(scanline)) # none - out = reader.undo_filter(1, cp(scanline), cp(scanprev)) - self.assertEqual(list(out), [30, 32, 34, 4, 9, 14]) # sub - out = reader.undo_filter(2, cp(scanline), cp(scanprev)) - self.assertEqual(list(out), [50, 53, 56, 184, 188, 192]) # up - out = reader.undo_filter(3, cp(scanline), cp(scanprev)) - self.assertEqual(list(out), [40, 42, 45, 99, 103, 108]) # average - out = reader.undo_filter(4, cp(scanline), 
cp(scanprev)) - self.assertEqual(list(out), [50, 53, 56, 184, 188, 192]) # paeth - def testUnfilterScanlinePaeth(self): - # This tests more edge cases in the paeth unfilter - reader = Reader(bytes='') - reader.psize = 3 - scanprev = array('B', [2, 0, 0, 0, 9, 11]) - scanline = array('B', [6, 10, 9, 100, 101, 102]) - - out = reader.undo_filter(4, scanline, scanprev) - self.assertEqual(list(out), [8, 10, 9, 108, 111, 113]) # paeth - def testIterstraight(self): - def arraify(list_of_str): - return [array('B', s) for s in list_of_str] - reader = Reader(bytes='') - reader.row_bytes = 6 - reader.psize = 3 - rows = reader.iterstraight(arraify(['\x00abcdef', '\x00ghijkl'])) - self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl'])) - - rows = reader.iterstraight(arraify(['\x00abc', 'def\x00ghijkl'])) - self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl'])) - - rows = reader.iterstraight(arraify(['\x00abcdef\x00ghijkl'])) - self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl'])) - - rows = reader.iterstraight(arraify(['\x00abcdef\x00ghi', 'jkl'])) - self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl'])) - -# === Command Line Support === - -def _dehex(s): - """Liberally convert from hex string to binary string.""" - import re - import binascii - - # Remove all non-hexadecimal digits - s = re.sub(r'[^a-fA-F\d]', '', s) - # binscii.unhexlify works in Python 2 and Python 3 (unlike - # thing.decode('hex')). - return binascii.unhexlify(strtobytes(s)) -def _enhex(s): - """Convert from binary string (bytes) to hex string (str).""" - - import binascii - - return bytestostr(binascii.hexlify(s)) - -# Copies of PngSuite test files taken -# from http://www.schaik.com/pngsuite/pngsuite_bas_png.html -# on 2009-02-19 by drj and converted to hex. -# Some of these are not actually in PngSuite (but maybe they should -# be?), they use the same naming scheme, but start with a capital -# letter. 
-_pngsuite = { - 'basi0g01': _dehex(""" -89504e470d0a1a0a0000000d49484452000000200000002001000000012c0677 -cf0000000467414d41000186a031e8965f0000009049444154789c2d8d310ec2 -300c45dfc682c415187a00a42e197ab81e83b127e00c5639001363a580d8582c -65c910357c4b78b0bfbfdf4f70168c19e7acb970a3f2d1ded9695ce5bf5963df -d92aaf4c9fd927ea449e6487df5b9c36e799b91bdf082b4d4bd4014fe4014b01 -ab7a17aee694d28d328a2d63837a70451e1648702d9a9ff4a11d2f7a51aa21e5 -a18c7ffd0094e3511d661822f20000000049454e44ae426082 -"""), - 'basi0g02': _dehex(""" -89504e470d0a1a0a0000000d49484452000000200000002002000000016ba60d -1f0000000467414d41000186a031e8965f0000005149444154789c635062e860 -00e17286bb609c93c370ec189494960631366e4467b3ae675dcf10f521ea0303 -90c1ca006444e11643482064114a4852c710baea3f18c31918020c30410403a6 -0ac1a09239009c52804d85b6d97d0000000049454e44ae426082 -"""), - 'basi0g04': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200400000001e4e6f8 -bf0000000467414d41000186a031e8965f000000ae49444154789c658e5111c2 -301044171c141c141c041c843a287510ea20d441c041c141c141c04191102454 -03994998cecd7edcecedbb9bdbc3b2c2b6457545fbc4bac1be437347f7c66a77 -3c23d60db15e88f5c5627338a5416c2e691a9b475a89cd27eda12895ae8dfdab -43d61e590764f5c83a226b40d669bec307f93247701687723abf31ff83a2284b -a5b4ae6b63ac6520ad730ca4ed7b06d20e030369bd6720ed383290360406d24e -13811f2781eba9d34d07160000000049454e44ae426082 -"""), - 'basi0g08': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200800000001211615 -be0000000467414d41000186a031e8965f000000b549444154789cb5905d0ac2 -3010849dbac81c42c47bf843cf253e8878b0aa17110f214bdca6be240f5d21a5 -94ced3e49bcd322c1624115515154998aa424822a82a5624a1aa8a8b24c58f99 -999908130989a04a00d76c2c09e76cf21adcb209393a6553577da17140a2c59e -70ecbfa388dff1f03b82fb82bd07f05f7cb13f80bb07ad2fd60c011c3c588eef -f1f4e03bbec7ce832dca927aea005e431b625796345307b019c845e6bfc3bb98 -769d84f9efb02ea6c00f9bb9ff45e81f9f280000000049454e44ae426082 -"""), - 'basi0g16': _dehex(""" 
-89504e470d0a1a0a0000000d49484452000000200000002010000000017186c9 -fd0000000467414d41000186a031e8965f000000e249444154789cb5913b0ec2 -301044c7490aa8f85d81c3e4301c8f53a4ca0da8902c8144b3920b4043111282 -23bc4956681a6bf5fc3c5a3ba0448912d91a4de2c38dd8e380231eede4c4f7a1 -4677700bec7bd9b1d344689315a3418d1a6efbe5b8305ba01f8ff4808c063e26 -c60d5c81edcf6c58c535e252839e93801b15c0a70d810ae0d306b205dc32b187 -272b64057e4720ff0502154034831520154034c3df81400510cdf0015c86e5cc -5c79c639fddba9dcb5456b51d7980eb52d8e7d7fa620a75120d6064641a05120 -b606771a05626b401a05f1f589827cf0fe44c1f0bae0055698ee8914fffffe00 -00000049454e44ae426082 -"""), - 'basi2c08': _dehex(""" -89504e470d0a1a0a0000000d49484452000000200000002008020000018b1fdd -350000000467414d41000186a031e8965f000000f249444154789cd59341aa04 -210c44abc07b78133d59d37333bd89d76868b566d10cf4675af8596431a11662 -7c5688919280e312257dd6a0a4cf1a01008ee312a5f3c69c37e6fcc3f47e6776 -a07f8bdaf5b40feed2d33e025e2ff4fe2d4a63e1a16d91180b736d8bc45854c5 -6d951863f4a7e0b66dcf09a900f3ffa2948d4091e53ca86c048a64390f662b50 -4a999660ced906182b9a01a8be00a56404a6ede182b1223b4025e32c4de34304 -63457680c93aada6c99b73865aab2fc094920d901a203f5ddfe1970d28456783 -26cffbafeffcd30654f46d119be4793f827387fc0d189d5bc4d69a3c23d45a7f -db803146578337df4d0a3121fc3d330000000049454e44ae426082 -"""), - 'basi2c16': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000201002000001db8f01 -760000000467414d41000186a031e8965f0000020a49444154789cd5962173e3 -3010853fcf1838cc61a1818185a53e56787fa13fa130852e3b5878b4b0b03081 -b97f7030070b53e6b057a0a8912bbb9163b9f109ececbc59bd7dcf2b45492409 -d66f00eb1dd83cb5497d65456aeb8e1040913b3b2c04504c936dd5a9c7e2c6eb -b1b8f17a58e8d043da56f06f0f9f62e5217b6ba3a1b76f6c9e99e8696a2a72e2 -c4fb1e4d452e92ec9652b807486d12b6669be00db38d9114b0c1961e375461a5 -5f76682a85c367ad6f682ff53a9c2a353191764b78bb07d8ddc3c97c1950f391 -6745c7b9852c73c2f212605a466a502705c8338069c8b9e84efab941eb393a97 -d4c9fd63148314209f1c1d3434e847ead6380de291d6f26a25c1ebb5047f5f24 
-d85c49f0f22cc1d34282c72709cab90477bf25b89d49f0f351822297e0ea9704 -f34c82bc94002448ede51866e5656aef5d7c6a385cb4d80e6a538ceba04e6df2 -480e9aa84ddedb413bb5c97b3838456df2d4fec2c7a706983e7474d085fae820 -a841776a83073838973ac0413fea2f1dc4a06e71108fda73109bdae48954ad60 -bf867aac3ce44c7c1589a711cf8a81df9b219679d96d1cec3d8bbbeaa2012626 -df8c7802eda201b2d2e0239b409868171fc104ba8b76f10b4da09f6817ffc609 -c413ede267fd1fbab46880c90f80eccf0013185eb48b47ba03df2bdaadef3181 -cb8976f18e13188768170f98c0f844bb78cb04c62ddac59d09fc3fa25dfc1da4 -14deb3df1344f70000000049454e44ae426082 -"""), - 'basi3p08': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020080300000133a3ba -500000000467414d41000186a031e8965f00000300504c5445224400f5ffed77 -ff77cbffff110a003a77002222ffff11ff110000222200ffac5566ff66ff6666 -ff01ff221200dcffffccff994444ff005555220000cbcbff44440055ff55cbcb -00331a00ffecdcedffffe4ffcbffdcdc44ff446666ff330000442200ededff66 -6600ffa444ffffaaeded0000cbcbfefffffdfffeffff0133ff33552a000101ff -8888ff00aaaa010100440000888800ffe4cbba5b0022ff22663200ffff99aaaa -ff550000aaaa00cb630011ff11d4ffaa773a00ff4444dc6b0066000001ff0188 -4200ecffdc6bdc00ffdcba00333300ed00ed7300ffff88994a0011ffff770000 -ff8301ffbabafe7b00fffeff00cb00ff999922ffff880000ffff77008888ffdc -ff1a33000000aa33ffff009900990000000001326600ffbaff44ffffffaaff00 -770000fefeaa00004a9900ffff66ff22220000998bff1155ffffff0101ff88ff -005500001111fffffefffdfea4ff4466ffffff66ff003300ffff55ff77770000 -88ff44ff00110077ffff006666ffffed000100fff5ed1111ffffff44ff22ffff -eded11110088ffff00007793ff2200dcdc3333fffe00febabaff99ffff333300 -63cb00baba00acff55ffffdcffff337bfe00ed00ed5555ffaaffffdcdcff5555 -00000066dcdc00dc00dc83ff017777fffefeffffffcbff5555777700fefe00cb -00cb0000fe010200010000122200ffff220044449bff33ffd4aa0000559999ff -999900ba00ba2a5500ffcbcbb4ff66ff9b33ffffbaaa00aa42880053aa00ffaa -aa0000ed00babaffff1100fe00000044009999990099ffcc99ba000088008800 -dc00ff93220000dcfefffeaa5300770077020100cb0000000033ffedff00ba00 
-ff3333edffedffc488bcff7700aa00660066002222dc0000ffcbffdcffdcff8b -110000cb00010155005500880000002201ffffcbffcbed0000ff88884400445b -ba00ffbc77ff99ff006600baffba00777773ed00fe00003300330000baff77ff -004400aaffaafffefe000011220022c4ff8800eded99ff99ff55ff002200ffb4 -661100110a1100ff1111dcffbabaffff88ff88010001ff33ffb98ed362000002 -a249444154789c65d0695c0b001806f03711a9904a94d24dac63292949e5a810 -d244588a14ca5161d1a1323973252242d62157d12ae498c8124d25ca3a11398a -16e55a3cdffab0ffe7f77d7fcff3528645349b584c3187824d9d19d4ec2e3523 -9eb0ae975cf8de02f2486d502191841b42967a1ad49e5ddc4265f69a899e26b5 -e9e468181baae3a71a41b95669da8df2ea3594c1b31046d7b17bfb86592e4cbe -d89b23e8db0af6304d756e60a8f4ad378bdc2552ae5948df1d35b52143141533 -33bbbbababebeb3b3bc9c9c9c6c6c0c0d7b7b535323225a5aa8a02024a4bedec -0a0a2a2bcdcd7d7cf2f3a9a9c9cdcdd8b8adcdd5b5ababa828298982824a4ab2 -b21212acadbdbc1414e2e24859b9a72730302f4f49292c4c57373c9c0a0b7372 -8c8c1c1c3a3a92936d6dfdfd293e3e26262a4a4eaea2424b4b5fbfbc9c323278 -3c0b0ba1303abaae8ecdeeed950d6669a9a7a7a141d4de9e9d5d5cdcd2229b94 -c572716132f97cb1d8db9bc3110864a39795d9db6b6a26267a7a9a98d4d6a6a7 -cb76090ef6f030354d4d75766e686030545464cb393a1a1ac6c68686eae8f8f9 -a9aa4644c8b66d6e1689dcdd2512a994cb35330b0991ad9f9b6b659596a6addd -d8282fafae5e5323fb8f41d01f76c22fd8061be01bfc041a0323e1002c81cd30 -0b9ec027a0c930014ec035580fc3e112bc069a0b53e11c0c8095f00176c163a0 -e5301baec06a580677600ddc05ba0f13e120bc81a770133ec355a017300d4ec2 -0c7800bbe1219c02fa08f3e13c1c85dbb00a2ec05ea0dff00a6ec15a98027360 -070c047a06d7e1085c84f1b014f6c03fa0b33018b6c0211801ebe018fc00da0a -6f61113c877eb01d4ec317a085700f26c130f80efbe132bc039a0733e106fc81 -f7f017f6c10aa0d1300a0ec374780943e1382c06fa0a9b60238c83473016cec0 -02f80f73fefe1072afc1e50000000049454e44ae426082 -"""), - 'basi6a08': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200806000001047d4a -620000000467414d41000186a031e8965f0000012049444154789cc595414ec3 -3010459fa541b8bbb26641b8069b861e8b4d12c1c112c1452a710a2a65d840d5 
-949041fc481ec98ae27c7f3f8d27e3e4648047600fec0d1f390fbbe2633a31e2 -9389e4e4ea7bfdbf3d9a6b800ab89f1bd6b553cfcbb0679e960563d72e0a9293 -b7337b9f988cc67f5f0e186d20e808042f1c97054e1309da40d02d7e27f92e03 -6cbfc64df0fc3117a6210a1b6ad1a00df21c1abcf2a01944c7101b0cb568a001 -909c9cf9e399cf3d8d9d4660a875405d9a60d000b05e2de55e25780b7a5268e0 -622118e2399aab063a815808462f1ab86890fc2e03e48bb109ded7d26ce4bf59 -0db91bac0050747fec5015ce80da0e5700281be533f0ce6d5900b59bcb00ea6d -200314cf801faab200ea752803a8d7a90c503a039f824a53f4694e7342000000 -0049454e44ae426082 -"""), - 'basn0g01': _dehex(""" -89504e470d0a1a0a0000000d49484452000000200000002001000000005b0147 -590000000467414d41000186a031e8965f0000005b49444154789c2dccb10903 -300c05d1ebd204b24a200b7a346f90153c82c18d0a61450751f1e08a2faaead2 -a4846ccea9255306e753345712e211b221bf4b263d1b427325255e8bdab29e6f -6aca30692e9d29616ee96f3065f0bf1f1087492fd02f14c90000000049454e44 -ae426082 -"""), - 'basn0g02': _dehex(""" -89504e470d0a1a0a0000000d49484452000000200000002002000000001ca13d -890000000467414d41000186a031e8965f0000001f49444154789c6360085df5 -1f8cf1308850c20053868f0133091f6390b90700bd497f818b0989a900000000 -49454e44ae426082 -"""), - # A version of basn0g04 dithered down to 3 bits. 
- 'Basn0g03': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8 -2900000001734249540371d88211000000fd49444154789c6d90d18906210c84 -c356f22356b2889588604301b112112b11d94a96bb495cf7fe87f32d996f2689 -44741cc658e39c0b118f883e1f63cc89dafbc04c0f619d7d898396c54b875517 -83f3a2e7ac09a2074430e7f497f00f1138a5444f82839c5206b1f51053cca968 -63258821e7f2b5438aac16fbecc052b646e709de45cf18996b29648508728612 -952ca606a73566d44612b876845e9a347084ea4868d2907ff06be4436c4b41a3 -a3e1774285614c5affb40dbd931a526619d9fa18e4c2be420858de1df0e69893 -a0e3e5523461be448561001042b7d4a15309ce2c57aef2ba89d1c13794a109d7 -b5880aa27744fc5c4aecb5e7bcef5fe528ec6293a930690000000049454e44ae -426082 -"""), - 'basn0g04': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8 -290000000467414d41000186a031e8965f0000004849444154789c6360601014 -545232367671090d4d4b2b2f6720430095dbd1418e002a77e64c720450b9ab56 -912380caddbd9b1c0154ee9933e408a072efde25470095fbee1d1902001f14ee -01eaff41fa0000000049454e44ae426082 -"""), - 'basn0g08': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200800000000561125 -280000000467414d41000186a031e8965f0000004149444154789c6364602400 -1408c8b30c05058c0f0829f8f71f3f6079301c1430ca11906764a2795c0c0605 -8c8ff0cafeffcff887e67131181430cae0956564040050e5fe7135e2d8590000 -000049454e44ae426082 -"""), - 'basn0g16': _dehex(""" -89504e470d0a1a0a0000000d49484452000000200000002010000000000681f9 -6b0000000467414d41000186a031e8965f0000005e49444154789cd5d2310ac0 -300c4351395bef7fc6dca093c0287b32d52a04a3d98f3f3880a7b857131363a0 -3a82601d089900dd82f640ca04e816dc06422640b7a03d903201ba05b7819009 -d02d680fa44c603f6f07ec4ff41938cf7f0016d84bd85fae2b9fd70000000049 -454e44ae426082 -"""), - 'basn2c08': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed -a30000000467414d41000186a031e8965f0000004849444154789cedd5c10900 -300c024085ec91fdb772133b442bf4a1f8cee12bb40d043b800a14f81ca0ede4 
-7d4c784081020f4a871fc284071428f0a0743823a94081bb7077a3c00182b1f9 -5e0f40cf4b0000000049454e44ae426082 -"""), - 'basn2c16': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000201002000000ac8831 -e00000000467414d41000186a031e8965f000000e549444154789cd596c10a83 -301044a7e0417fcb7eb7fdadf6961e06039286266693cc7a188645e43dd6a08f -1042003e2fe09aef6472737e183d27335fcee2f35a77b702ebce742870a23397 -f3edf2705dd10160f3b2815fe8ecf2027974a6b0c03f74a6e4192843e75c6c03 -35e8ec3202f5e84c0181bbe8cca967a00d9df3491bb040671f2e6087ce1c2860 -8d1e05f8c7ee0f1d00b667e70df44467ef26d01fbd9bc028f42860f71d188bce -fb8d3630039dbd59601e7ab3c06cf428507f0634d039afdc80123a7bb1801e7a -b1802a7a14c89f016d74ce331bf080ce9e08f8414f04bca133bfe642fe5e07bb -c4ec0000000049454e44ae426082 -"""), - 'basn3p04': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200403000000815467 -c70000000467414d41000186a031e8965f000000037342495404040477f8b5a3 -0000002d504c54452200ff00ffff8800ff22ff000099ffff6600dd00ff77ff00 -ff000000ff99ddff00ff00bbffbb000044ff00ff44d2b049bd00000047494441 -54789c63e8e8080d3d7366d5aaf27263e377ef66ce64204300952b28488e002a -d7c5851c0154eeddbbe408a07119c81140e52a29912380ca4d4b23470095bb7b -37190200e0c4ead10f82057d0000000049454e44ae426082 -"""), - 'basn6a08': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200806000000737a7a -f40000000467414d41000186a031e8965f0000006f49444154789cedd6310a80 -300c46e12764684fa1f73f55048f21c4ddc545781d52e85028fc1f4d28d98a01 -305e7b7e9cffba33831d75054703ca06a8f90d58a0074e351e227d805c8254e3 -1bb0420f5cdc2e0079208892ffe2a00136a07b4007943c1004d900195036407f -011bf00052201a9c160fb84c0000000049454e44ae426082 -"""), - 'cs3n3p08': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a -c60000000467414d41000186a031e8965f0000000373424954030303a392a042 -00000054504c544592ff0000ff9200ffff00ff0000dbff00ff6dffb600006dff -b6ff00ff9200dbff000049ffff2400ff000024ff0049ff0000ffdb00ff4900ff 
-b6ffff0000ff2400b6ffffdb000092ffff6d000024ffff49006dff00df702b17 -0000004b49444154789c85cac70182000000b1b3625754b0edbfa72324ef7486 -184ed0177a437b680bcdd0031c0ed00ea21f74852ed00a1c9ed0086da0057487 -6ed0121cd6d004bda0013a421ff803224033e177f4ae260000000049454e44ae -426082 -"""), - 's09n3p02': _dehex(""" -89504e470d0a1a0a0000000d49484452000000090000000902030000009dffee -830000000467414d41000186a031e8965f000000037342495404040477f8b5a3 -0000000c504c544500ff000077ffff00ffff7700ff5600640000001f49444154 -789c63600002fbff0c0c56ab19182ca381581a4283f82071200000696505c36a -437f230000000049454e44ae426082 -"""), - 'tbgn3p08': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a -c60000000467414d41000186a031e8965f00000207504c54457f7f7fafafafab -abab110000222200737300999999510d00444400959500959595e6e600919191 -8d8d8d620d00898989666600b7b700911600000000730d007373736f6f6faaaa -006b6b6b676767c41a00cccc0000f30000ef00d51e0055555567670000dd0051 -515100d1004d4d4de61e0038380000b700160d0d00ab00560d00090900009500 -009100008d003333332f2f2f2f2b2f2b2b000077007c7c001a05002b27000073 -002b2b2b006f00bb1600272727780d002323230055004d4d00cc1e00004d00cc -1a000d00003c09006f6f00002f003811271111110d0d0d55554d090909001100 -4d0900050505000d00e2e200000900000500626200a6a6a6a2a2a29e9e9e8484 -00fb00fbd5d500801100800d00ea00ea555500a6a600e600e6f7f700e200e233 -0500888888d900d9848484c01a007777003c3c05c8c8008080804409007c7c7c -bb00bbaa00aaa600a61e09056262629e009e9a009af322005e5e5e05050000ee -005a5a5adddd00a616008d008d00e20016050027270088110078780000c40078 -00787300736f006f44444400aa00c81e004040406600663c3c3c090000550055 -1a1a00343434d91e000084004d004d007c004500453c3c00ea1e00222222113c -113300331e1e1efb22001a1a1a004400afaf00270027003c001616161e001e0d -160d2f2f00808000001e00d1d1001100110d000db7b7b7090009050005b3b3b3 -6d34c4230000000174524e530040e6d86600000001624b474402660b7c640000 -01f249444154789c6360c0048c8c58049100575f215ee92e6161ef109cd2a15e 
-4b9645ce5d2c8f433aa4c24f3cbd4c98833b2314ab74a186f094b9c2c27571d2 -6a2a58e4253c5cda8559057a392363854db4d9d0641973660b0b0bb76bb16656 -06970997256877a07a95c75a1804b2fbcd128c80b482a0b0300f8a824276a9a8 -ec6e61612b3e57ee06fbf0009619d5fac846ac5c60ed20e754921625a2daadc6 -1967e29e97d2239c8aec7e61fdeca9cecebef54eb36c848517164514af16169e -866444b2b0b7b55534c815cc2ec22d89cd1353800a8473100a4485852d924a6a -412adc74e7ad1016ceed043267238c901716f633a812022998a4072267c4af02 -92127005c0f811b62830054935ce017b38bf0948cc5c09955f030a24617d9d46 -63371fd940b0827931cbfdf4956076ac018b592f72d45594a9b1f307f3261b1a -084bc2ad50018b1900719ba6ba4ca325d0427d3f6161449486f981144cf3100e -2a5f2a1ce8683e4ddf1b64275240c8438d98af0c729bbe07982b8a1c94201dc2 -b3174c9820bcc06201585ad81b25b64a2146384e3798290c05ad280a18c0a62e -e898260c07fca80a24c076cc864b777131a00190cdfa3069035eccbc038c30e1 -3e88b46d16b6acc5380d6ac202511c392f4b789aa7b0b08718765990111606c2 -9e854c38e5191878fbe471e749b0112bb18902008dc473b2b2e8e72700000000 -49454e44ae426082 -"""), - 'Tp2n3p08': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a -c60000000467414d41000186a031e8965f00000300504c544502ffff80ff05ff -7f0703ff7f0180ff04ff00ffff06ff000880ff05ff7f07ffff06ff000804ff00 -0180ff02ffff03ff7f02ffff80ff0503ff7f0180ffff0008ff7f0704ff00ffff -06ff000802ffffff7f0704ff0003ff7fffff0680ff050180ff04ff000180ffff -0008ffff0603ff7f80ff05ff7f0702ffffff000880ff05ffff0603ff7f02ffff -ff7f070180ff04ff00ffff06ff000880ff050180ffff7f0702ffff04ff0003ff -7fff7f0704ff0003ff7f0180ffffff06ff000880ff0502ffffffff0603ff7fff -7f0702ffff04ff000180ff80ff05ff0008ff7f07ffff0680ff0504ff00ff0008 -0180ff03ff7f02ffff02ffffffff0604ff0003ff7f0180ffff000880ff05ff7f -0780ff05ff00080180ff02ffffff7f0703ff7fffff0604ff00ff7f07ff0008ff -ff0680ff0504ff0002ffff0180ff03ff7fff0008ffff0680ff0504ff000180ff -02ffff03ff7fff7f070180ff02ffff04ff00ffff06ff0008ff7f0780ff0503ff -7fffff06ff0008ff7f0780ff0502ffff03ff7f0180ff04ff0002ffffff7f07ff 
-ff0604ff0003ff7fff00080180ff80ff05ffff0603ff7f0180ffff000804ff00 -80ff0502ffffff7f0780ff05ffff0604ff000180ffff000802ffffff7f0703ff -7fff0008ff7f070180ff03ff7f02ffff80ff05ffff0604ff00ff0008ffff0602 -ffff0180ff04ff0003ff7f80ff05ff7f070180ff04ff00ff7f0780ff0502ffff -ff000803ff7fffff0602ffffff7f07ffff0680ff05ff000804ff0003ff7f0180 -ff02ffff0180ffff7f0703ff7fff000804ff0080ff05ffff0602ffff04ff00ff -ff0603ff7fff7f070180ff80ff05ff000803ff7f0180ffff7f0702ffffff0008 -04ff00ffff0680ff0503ff7f0180ff04ff0080ff05ffff06ff000802ffffff7f -0780ff05ff0008ff7f070180ff03ff7f04ff0002ffffffff0604ff00ff7f07ff -000880ff05ffff060180ff02ffff03ff7f80ff05ffff0602ffff0180ff03ff7f -04ff00ff7f07ff00080180ffff000880ff0502ffff04ff00ff7f0703ff7fffff -06ff0008ffff0604ff00ff7f0780ff0502ffff03ff7f0180ffdeb83387000000 -f874524e53000000000000000008080808080808081010101010101010181818 -1818181818202020202020202029292929292929293131313131313131393939 -393939393941414141414141414a4a4a4a4a4a4a4a52525252525252525a5a5a -5a5a5a5a5a62626262626262626a6a6a6a6a6a6a6a73737373737373737b7b7b -7b7b7b7b7b83838383838383838b8b8b8b8b8b8b8b94949494949494949c9c9c -9c9c9c9c9ca4a4a4a4a4a4a4a4acacacacacacacacb4b4b4b4b4b4b4b4bdbdbd -bdbdbdbdbdc5c5c5c5c5c5c5c5cdcdcdcdcdcdcdcdd5d5d5d5d5d5d5d5dedede -dededededee6e6e6e6e6e6e6e6eeeeeeeeeeeeeeeef6f6f6f6f6f6f6f6b98ac5 -ca0000012c49444154789c6360e7169150d230b475f7098d4ccc28a96ced9e32 -63c1da2d7b8e9fb97af3d1fb8f3f18e8a0808953544a4dd7c4c2c9233c2621bf -b4aab17fdacce5ab36ee3a72eafaad87efbefea68702362e7159652d031b07cf -c0b8a4cce28aa68e89f316aedfb4ffd0b92bf79fbcfcfe931e0a183904e55435 -8decdcbcc22292b3caaadb7b27cc5db67af3be63e72fdf78fce2d31f7a2860e5 -119356d037b374f10e8a4fc92eaa6fee99347fc9caad7b0f9ebd74f7c1db2fbf -e8a180995f484645dbdccad12f38363dafbcb6a573faeca5ebb6ed3e7ce2c29d -e76fbefda38702063e0149751d537b67ff80e8d4dcc29a86bea97316add9b0e3 -c0e96bf79ebdfafc971e0a587885e515f58cad5d7d43a2d2720aeadaba26cf5a -bc62fbcea3272fde7efafac37f3a28000087c0fe101bc2f85f0000000049454e -44ae426082 -"""), 
- 'tbbn1g04': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8 -290000000467414d41000186a031e8965f0000000274524e530007e8f7589b00 -000002624b47440000aa8d23320000013e49444154789c55d1cd4b024118c7f1 -efbe6419045b6a48a72d352808b435284f9187ae9b098627a1573a19945beba5 -e8129e8222af11d81e3a4545742de8ef6af6d5762e0fbf0fc33c33f36085cb76 -bc4204778771b867260683ee57e13f0c922df5c719c2b3b6c6c25b2382cea4b9 -9f7d4f244370746ac71f4ca88e0f173a6496749af47de8e44ba8f3bf9bdfa98a -0faf857a7dd95c7dc8d7c67c782c99727997f41eb2e3c1e554152465bb00fe8e -b692d190b718d159f4c0a45c4435915a243c58a7a4312a7a57913f05747594c6 -46169866c57101e4d4ce4d511423119c419183a3530cc63db88559ae28e7342a -1e9c8122b71139b8872d6e913153224bc1f35b60e4445bd4004e20ed6682c759 -1d9873b3da0fbf50137dc5c9bde84fdb2ec8bde1189e0448b63584735993c209 -7a601bd2710caceba6158797285b7f2084a2f82c57c01a0000000049454e44ae -426082 -"""), - 'tbrn2c08': _dehex(""" -89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed -a30000000467414d41000186a031e8965f0000000674524e53007f007f007f8a -33334f00000006624b474400ff0000000033277cf3000004d649444154789cad -965f68537714c73fd912d640235e692f34d0406fa0c1663481045ab060065514 -56660a295831607df0a1488715167060840a1614e6431e9cb34fd2c00a762c85 -f6a10f816650c13b0cf40612e1822ddc4863bd628a8924d23d6464f9d3665dd9 -f7e977ce3dbff3cd3939bfdfef6bb87dfb364782dbed065ebe7cd93acc78b4ec -a228debd7bb7bfbfbfbbbbfb7f261045311a8d261209405194274f9ea4d3e916 -f15f1c3eb5dd6e4fa5fecce526239184a2b0b8486f6f617171b1f5ae4311381c -8e57af5e5dbd7a351088150a78bd389d44222c2f93cdfe66b7db8f4ee07038b6 -b6b6bebf766d7e7e7e60a06432313b4ba984c3c1c4049a46b95c5a58583822c1 -dbb76f27272733d1b9df853c3030c0f232562b9108cf9eb1b888d7cbf030abab -31abd5fa1f08dc6ef7e7cf9f1f3f7e1c8944745d4f1400c62c001313acad21cb -b8dd2c2c603271eb1640341aad4c6d331aa7e8c48913a150a861307ecc11e964 -74899919bc5e14e56fffc404f1388502f178dceff7ef4bf0a5cfe7abb533998c -e5f9ea2f1dd88c180d64cb94412df3dd57e83a6b3b3c7a84c98420100c72fd3a 
-636348bae726379fe69e8e8d8dbd79f3a6558b0607079796965256479b918085 -7b02db12712b6181950233023f3f647494ee6e2e5ea45864cce5b8a7fe3acffc -3aebb22c2bd5d20e22d0757d7b7bbbbdbd3d94a313bed1b0aa3cd069838b163a -8d4c59585f677292d0b84d9a995bd337def3fe6bbe5e6001989b9b6bfe27ea08 -36373781542ab56573248b4c5bc843ac4048c7ab21aa24ca00534c25482828a3 -8c9ee67475bbaaaab22cb722c8e57240a150301a8d219de94e44534d7d90e885 -87acb0e2c4f9800731629b6c5ee14a35a6b9887d2a0032994cb9cf15dbe59650 -ff7b46a04c9a749e7cc5112214266cc65c31354d5b5d5d3d90209bcd5616a552 -a95c2e87f2a659bd9ee01c2cd73964e438f129a6aa9e582c363838b80f81d7eb -5555b56a2a8ad2d9d7affd0409f8015c208013fea00177b873831b0282c964f2 -783c1e8fa7582cee5f81a669b5e6eeeeaee58e8559b0c233d8843c7c0b963a82 -34e94b5cb2396d7d7d7db22c8ba258fb0afd43f0e2c58b919191ba9de9b4d425 -118329b0c3323c8709d02041b52b4ea7f39de75d2a934a2693c0a953a76a93d4 -5d157ebf7f6565a5542a553df97c5e10045dd731c130b86113cc300cbd489224 -08422a952a140a95788fc763b1d41558d7a2d7af5f5fb870a1d6a3aaaacd6603 -18802da84c59015bd2e6897b745d9765b99a1df0f97c0daf74e36deaf7fbcd66 -73ad2797cb89a2c839880188a2e8743a8bc5a22ccbba5e376466b3b9bdbdbd21 -6123413a9d0e0402b51e4dd3bababa788eb022b85caeb6b6364551b6b7b76942 -43f7f727007a7a7a04a1ee8065b3595fde2768423299ac1ec6669c3973e65004 -c0f8f878ad69341a33994ced2969c0d0d0502412f9f8f163f3a7fd654b474787 -288ad53e74757535df6215b85cae60302849d2410aecc037f9f2e5cbd5b5c160 -680eb0dbede170381c0e7ff8f0a185be3b906068684892a4ca7a6f6faff69328 -8ad3d3d3f7efdfdfdbdbfb57e96868a14d0d0643381c96242997cbe5f3794010 -84603078fcf8f1d6496bd14a3aba5c2ea7d369341a5555b5582c8140e0fcf9f3 -1b1b1b87cf4eeb0a8063c78e45a3d19e9e1ebfdfdf5a831e844655d18093274f -9e3d7bf6d3a74f3b3b3b47c80efc05ff7af28fefb70d9b0000000049454e44ae -426082 -"""), - 'basn6a16': _dehex(""" -89504e470d0a1a0a0000000d494844520000002000000020100600000023eaa6 -b70000000467414d41000186a031e8965f00000d2249444154789cdd995f6c1c -d775c67ff38fb34b724d2ee55a8e4b04a0ac87049100cab4dbd8c6528902cb4d 
-10881620592e52d4325ac0905bc98a94025e71fd622cb5065ac98a0c283050c0 -728a00b6e542a1d126885cd3298928891d9a0444037e904434951d4b90b84b2f -c9dde1fcebc33977a95555348f411e16dfce9d3b77ee77eebde77ce78c95a669 -0ad07c17009a13edd898b87dfb1fcb7d2b4d1bff217f33df80deb1e6267df0ff -c1e6e6dfafdf1f5a7fd30f9aef66b6d546dd355bf02c40662e3307f9725a96c6 -744c3031f83782f171c148dbc3bf1774f5dad1e79d6f095a3f54d4fbec5234ef -d9a2f8d73afe4f14f57ef4f42def7b44f19060f06b45bddf1c5534d77fd922be -2973a15a82e648661c6e3240aa3612ead952b604bde57458894f29deaf133bac -13d2766f5227a4a3b8cf08da7adfd6fbd6bd8a4fe9dbb43d35e3dfa3f844fbf8 -9119bf4f7144094fb56333abf8a86063ca106f94b3a3b512343765e60082097f -1bb86ba72439a653519b09f5cee1ce61c897d37eedf5553580ae60f4af8af33a -b14fd400b6a0f34535c0434afc0b3a9f07147527a5fa7ca218ff56c74d74dc3f -155cfd3325fc278acf2ae1cb4a539f5f9937c457263b0bd51234c732a300cdd1 -cc1840f0aaff54db0e4874ed5a9b5d6d27d4bb36746d80de72baa877ff4b275a -d7895ed1897ea4139b5143fcbb1a62560da1ed9662aaed895ec78a91c18795b8 -5e07ab4af8ba128e95e682e0728bf8f2e5ae815a091a53d902ac1920d8e05f06 -589de8d8d66680789f4e454fb9d9ec66cd857af796ee2d902fa73fd5bba775a2 -153580ae44705ed0d37647d15697cb8f14bfa3e3e8fdf8031d47af571503357c -f30d25acedcbbf135c9a35c49766ba07ab255859e8ec03684e66860182dff8f7 -0304bff6ff1c20fc81b7afdd00a71475539a536e36bb5973a19e3b923b02bde5 -e4efd4003ac170eb2d13fe274157afedbd82d6fb3a9a1e85e4551d47cf7078f8 -9671fe4289ebf5f2bf08d63f37c4eb4773c55a0996efeefa0ca011671d8060ca -2f0004c7fcc300e166ef0240f825efe3361f106d57d423d0723f7acacd66376b -2ed47b7a7a7a205f4ef4ac4691e0aad9aa0d41cf13741c3580a506487574ddca -61a8c403c1863ebfbcac3475168b2de28b8b3d77544bb05ce92a02aceced3c0d -d0cc65ea371b201cf1c601c24dde1c4078cedbdeb60322f50126a019bf6edc9b -39e566b39b3517eaf97c3e0fbde5e4491d45bd74537145d155b476aa0176e868 -c6abebf30dbd5e525c54ac8e18e2d56abeb756827a3d970358a97416019a6f64 -f60004fdfe1580d5c98e618070cc1b05887eee7e0d209a70db7d8063029889b4 -c620ead78d7b33a7dc6c76b3e6427ddddbebde867c393aa7845e5403e8ca794a 
-d0d6fb897af5f03525fe5782f5e7046bdaef468bf88d1debc6ab25583cd17310 -6079b9ab0ba059c914018245bf076075b5a303200c3c1f209a733701444fbbaf -00c4134ebb016c5d0b23614c243701cdf875e3decce9349bddacb9505fbf7dfd -76e82d87736a00f5d2b5ffd4b7dce2719a4d25ae717ee153c1abef18e257cfad -7fa45682da48ef38c052b53b0fd06864b300c151ff08c0ea431de701a287dd5f -004497dc7b01a253ee3e80b8c7f91c20f967fb6fdb7c80ada7d8683723614c24 -3701cdf875e3decc29379bddacb950ef3fd47f08f2e5a61ea4aa2a3eb757cd55 -13345efcfa59c12b2f19e2578ef77fb75a82854ffbee01a83f977b11a031931d -040802df07082b5e11207cc17b1e209a770700e2df0a83e409fb7580f827c230 -99b06fd901fb058d6835dacd481813c94d40337eddb83773cacd66376b2ed437 -bebcf165e82d2f4e4beb7f3fa6e652c2d7ee10bc78c010bfb87fe3c95a09ae9f -bd732740bd2fb700d0f865f64180e059ff044018ca0ca28a5b04883f701e0088 -bfec7c0c909cb71f0448c6ec518074b375012079d9dedf66004bcfbc51eb2dd1 -aadacd481813c94d40337eddb83773cacd66376b2ed487868686205fbe7c49ef -5605a73f34c4a7a787eeab96e0da81bb4e022c15ba27019a5b339300e16bf286 -a8eae601e25866907cdf3e0890acb36f00245fb57f05904e59c300e92561946e -b2e600d209ab7d07f04d458dfb46ad1bd16ab49b913026929b8066fcba716fe6 -949bcd6ed65ca8ef7e7cf7e3d05b7e7c8f217ee6cdddbb6a25a856f37980e0c7 -fe4e80a82623c48193014846ec7180f4acf518409aca0cd28a5504e03b32c374 -de1a00608a0240faaa327a4b19fe946fb6f90054dbb5f2333d022db56eb4966a -3723614c243701cdf8f556bea8a7dc6c76b3e66bd46584ddbbcebc0990cf4b0f -ff4070520c282338a7e26700ec725202b01e4bcf0258963c6f1d4d8f0030cb20 -805549c520930c03584fa522b676f11600ffc03fde3e1b3489a9c9054c9aa23b -c08856a3dd8c843191dc0434e3d78d7b33a75c36fb993761f7ae5a69f72ef97f -e6ad336fed7e1c60e8bee96980bbdebbb60da07b7069062033d9dc0ae03d296f -70ab511ec071640676252902d833c916007b3e1900b0a6d2028035968e025861 -ea01581369fb11488c34d18cbc95989afccca42baad65ba2d5683723614c24d7 -8066fcbab8b7e96918baaf5aaa56219f975fb50a43f7c9bde90fa73f1c1a02d8 -78f2e27e803b77ca08b90519315b6fe400fc1392097a9eccc0ad444500e70199 -a1331f0f00d8934901c07e5d526ceb87c2d07e2579badd005a2b31a5089391b7 
-1253358049535a6add8856dd0146c298482e01ede27ed878b256ba7600ee3a09 -c18fc1df09fe01084ec25defc1b56db0f1a4f4bd78e0e2818d2f0334e7330300 -7df7c888b917e50dd9c1c60c80efcb0cbc63e1f700bce7c31700dccbd1060027 -8add9b0de06c8e2f00d84962b7d7030e2a61538331b98051f92631bd253f336a -dd8856a3dd44c25c390efddfad96ae9f853b77c25201ba27c533b8bdf28b6ad0 -3d084b33d2e7fa59099e9901b8f2d29597fa0f01848f78e70082117f1ca07b76 -6910209b9519f895a008d031bbba05c09d8f06005c5b18b8fba25300cea6780e -c03e911c6ccf06d507b48a4fa606634a114609de929f9934c5a87511ad57cfc1 -fa476aa5854fa1ef1e3910b905686e85cc24c40138198915f133d2d6dc2a7dea -7df2ccc2a752faf2cec1d577aebeb37e3b4034eeee0008dff3be0e6b923773b4 -7904c0ef9119767cb4fa1500ef1361e08e452500f71561e84cc4ed3e20fab6a2 -c905f40cb76a3026bf3319b91ac2e46792a6dcd801ebc6aba5da08f48ecb81c8 -bd088d5f42f6417191de93908c803d0e76199292b485af41b60e8d9c3c537f0e -8211f0c7211a077707dc18b931b2ee6d80a4d7ae024491ebc24d4a708ff70680 -7f25e807e8785f1878e322d6ddaf453f0770ff2dfa769b01423dbbad72a391b6 -5a7c3235985629423372494cab55c8f7d64a8b27a0e7202c55a13b0f8d19c80e -4ae9ca3f015115dc3ca467c17a4c7ee95970ab10e5a54ff0ac3cd39881ee5958 -1a84f03df0be0e492fd855a8d6aa35d10b4962dbb0a604a3d3ee5e80a8eee600 -a24977f8660378bf0bbf00e01d0a8fb7f980f04b8aa6ce6aca8d5a7533c52753 -839152c4e222f4dc512dd5eb90cbc981e8ea12cf90cd8a8bf47d89159e2741d3 -7124f65b96fcd254dae258fa84a13c13043246a32129574787e49eae2b49b86d -c3e2e78b9ff7f4002415bb08907c66df0d103b4e0c104db90500ff70700c203a -ee1e82dba4c3e16e256c0acca6ceaae9afd1f612d7eb472157ac95962bd05594 -7dd1598466053245088e827f44628657942a825b84e4fb601f84b4025611aca3 -901e01bb024911dc0a4445f08e41f83df02b10142173149ab71baf027611ea95 -7a257704201d14cd9af4d90b00f194530088cb4e09c0df1c5c0088f7393f6833 -c0aa3ac156655de3bca9b34ab9716906ba07aba5e5bba1eb3358d90b9da7c533 -64f6888bf47b60f521e8380fe10be03d2feac17900927560df40f4e48f805960 -50328d648bf4893f9067c217a0631656b7c898c122847bc07b03a2d3e0ee85e4 -33b0ef867450c4fad2ecd26cf7168074c0ba0c904cdac300c9cfec4701924df6 
-1cdca61e10685c6f7d52d0caba1498972f43d740adb4b2009d7d7220b20e3473 -90a943d00ffe959bb6eac3e0fe42ea49ee00c45f06e76329b1dabf127d690d80 -5581b408f63c2403e0cc433c00ee658836803b0fd100747c04ab5f917704fd10 -d5c1cd41ec801343d207f602a403605d86e5f9e5f9ae0d00e994556833806685 -c931fb709b0f08b4e869bea5c827859549e82c544b8d29c816a0390999613920 -7e610d5727a16318c2003c1fa24be0de2b32caf92224e7c17e5004b6350c4c01 -05601218066b0ad28224e149019c086257ca315102de2712903bde97b8144d82 -3b2c6ac52d403c054e019249b087f53d0558995a99ea946c70cc927458b3c1ff -550f30050df988d4284376b4566a8e416654cc921985e037e0df0fc131f00f4b -acf0c6211c036f14a239703741740adc7da227edd7e56b833d0ae92549b4d357 -25dfb49ed2ff63908e6adf27d6d0dda7638d4154d2778daca17f58e61297c129 -41f233b01f5dc3740cac51688c35c6b22580f48224fee9b83502569a66b629f1 -09f3713473413e2666e7fe6f6c6efefdfafda1f56f6e06f93496d9d67cb7366a -9964b6f92e64b689196ec6c604646fd3fe4771ff1bf03f65d8ecc3addbb5f300 -00000049454e44ae426082 -"""), -} - -def read_pam_header(infile): - """ - Read (the rest of a) PAM header. `infile` should be positioned - immediately after the initial 'P7' line (at the beginning of the - second line). Returns are as for `read_pnm_header`. - """ - - # Unlike PBM, PGM, and PPM, we can read the header a line at a time. 
- header = dict() - while True: - l = infile.readline().strip() - if l == strtobytes('ENDHDR'): - break - if not l: - raise EOFError('PAM ended prematurely') - if l[0] == strtobytes('#'): - continue - l = l.split(None, 1) - if l[0] not in header: - header[l[0]] = l[1] - else: - header[l[0]] += strtobytes(' ') + l[1] - - required = ['WIDTH', 'HEIGHT', 'DEPTH', 'MAXVAL'] - required = [strtobytes(x) for x in required] - WIDTH,HEIGHT,DEPTH,MAXVAL = required - present = [x for x in required if x in header] - if len(present) != len(required): - raise Error('PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL') - width = int(header[WIDTH]) - height = int(header[HEIGHT]) - depth = int(header[DEPTH]) - maxval = int(header[MAXVAL]) - if (width <= 0 or - height <= 0 or - depth <= 0 or - maxval <= 0): - raise Error( - 'WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers') - return 'P7', width, height, depth, maxval - -def read_pnm_header(infile, supported=('P5','P6')): - """ - Read a PNM header, returning (format,width,height,depth,maxval). - `width` and `height` are in pixels. `depth` is the number of - channels in the image; for PBM and PGM it is synthesized as 1, for - PPM as 3; for PAM images it is read from the header. `maxval` is - synthesized (as 1) for PBM images. - """ - - # Generally, see http://netpbm.sourceforge.net/doc/ppm.html - # and http://netpbm.sourceforge.net/doc/pam.html - - supported = [strtobytes(x) for x in supported] - - # Technically 'P7' must be followed by a newline, so by using - # rstrip() we are being liberal in what we accept. I think this - # is acceptable. - type = infile.read(3).rstrip() - if type not in supported: - raise NotImplementedError('file format %s not supported' % type) - if type == strtobytes('P7'): - # PAM header parsing is completely different. 
- return read_pam_header(infile) - # Expected number of tokens in header (3 for P4, 4 for P6) - expected = 4 - pbm = ('P1', 'P4') - if type in pbm: - expected = 3 - header = [type] - - # We have to read the rest of the header byte by byte because the - # final whitespace character (immediately following the MAXVAL in - # the case of P6) may not be a newline. Of course all PNM files in - # the wild use a newline at this point, so it's tempting to use - # readline; but it would be wrong. - def getc(): - c = infile.read(1) - if not c: - raise Error('premature EOF reading PNM header') - return c - - c = getc() - while True: - # Skip whitespace that precedes a token. - while c.isspace(): - c = getc() - # Skip comments. - while c == '#': - while c not in '\n\r': - c = getc() - if not c.isdigit(): - raise Error('unexpected character %s found in header' % c) - # According to the specification it is legal to have comments - # that appear in the middle of a token. - # This is bonkers; I've never seen it; and it's a bit awkward to - # code good lexers in Python (no goto). So we break on such - # cases. - token = strtobytes('') - while c.isdigit(): - token += c - c = getc() - # Slight hack. All "tokens" are decimal integers, so convert - # them here. - header.append(int(token)) - if len(header) == expected: - break - # Skip comments (again) - while c == '#': - while c not in '\n\r': - c = getc() - if not c.isspace(): - raise Error('expected header to end with whitespace, not %s' % c) - - if type in pbm: - # synthesize a MAXVAL - header.append(1) - depth = (1,3)[type == strtobytes('P6')] - return header[0], header[1], header[2], depth, header[3] - -def write_pnm(file, width, height, pixels, meta): - """Write a Netpbm PNM/PAM file.""" - - bitdepth = meta['bitdepth'] - maxval = 2**bitdepth - 1 - # Rudely, the number of image planes can be used to determine - # whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM). 
- planes = meta['planes'] - # Can be an assert as long as we assume that pixels and meta came - # from a PNG file. - assert planes in (1,2,3,4) - if planes in (1,3): - if 1 == planes: - # PGM - # Could generate PBM if maxval is 1, but we don't (for one - # thing, we'd have to convert the data, not just blat it - # out). - fmt = 'P5' - else: - # PPM - fmt = 'P6' - file.write('%s %d %d %d\n' % (fmt, width, height, maxval)) - if planes in (2,4): - # PAM - # See http://netpbm.sourceforge.net/doc/pam.html - if 2 == planes: - tupltype = 'GRAYSCALE_ALPHA' - else: - tupltype = 'RGB_ALPHA' - file.write('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n' - 'TUPLTYPE %s\nENDHDR\n' % - (width, height, planes, maxval, tupltype)) - # Values per row - vpr = planes * width - # struct format - fmt = '>%d' % vpr - if maxval > 0xff: - fmt = fmt + 'H' - else: - fmt = fmt + 'B' - for row in pixels: - file.write(struct.pack(fmt, *row)) - file.flush() - -def color_triple(color): - """ - Convert a command line colour value to a RGB triple of integers. - FIXME: Somewhere we need support for greyscale backgrounds etc. - """ - if color.startswith('#') and len(color) == 4: - return (int(color[1], 16), - int(color[2], 16), - int(color[3], 16)) - if color.startswith('#') and len(color) == 7: - return (int(color[1:3], 16), - int(color[3:5], 16), - int(color[5:7], 16)) - elif color.startswith('#') and len(color) == 13: - return (int(color[1:5], 16), - int(color[5:9], 16), - int(color[9:13], 16)) - -def _add_common_options(parser): - """Call *parser.add_option* for each of the options that are - common between this PNG--PNM conversion tool and the gen - tool. 
- """ - parser.add_option("-i", "--interlace", - default=False, action="store_true", - help="create an interlaced PNG file (Adam7)") - parser.add_option("-t", "--transparent", - action="store", type="string", metavar="#RRGGBB", - help="mark the specified colour as transparent") - parser.add_option("-b", "--background", - action="store", type="string", metavar="#RRGGBB", - help="save the specified background colour") - parser.add_option("-g", "--gamma", - action="store", type="float", metavar="value", - help="save the specified gamma value") - parser.add_option("-c", "--compression", - action="store", type="int", metavar="level", - help="zlib compression level (0-9)") - return parser - -def _main(argv): - """ - Run the PNG encoder with options from the command line. - """ - - # Parse command line arguments - from optparse import OptionParser - import re - version = '%prog ' + re.sub(r'( ?\$|URL: |Rev:)', '', __version__) - parser = OptionParser(version=version) - parser.set_usage("%prog [options] [imagefile]") - parser.add_option('-r', '--read-png', default=False, - action='store_true', - help='Read PNG, write PNM') - parser.add_option("-a", "--alpha", - action="store", type="string", metavar="pgmfile", - help="alpha channel transparency (RGBA)") - _add_common_options(parser) - - (options, args) = parser.parse_args(args=argv[1:]) - - # Convert options - if options.transparent is not None: - options.transparent = color_triple(options.transparent) - if options.background is not None: - options.background = color_triple(options.background) - - # Prepare input and output files - if len(args) == 0: - infilename = '-' - infile = sys.stdin - elif len(args) == 1: - infilename = args[0] - infile = open(infilename, 'rb') - else: - parser.error("more than one input file") - outfile = sys.stdout - if sys.platform == "win32": - import msvcrt, os - msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) - - if options.read_png: - # Encode PNG to PPM - png = Reader(file=infile) - 
width,height,pixels,meta = png.asDirect() - write_pnm(outfile, width, height, pixels, meta) - else: - # Encode PNM to PNG - format, width, height, depth, maxval = \ - read_pnm_header(infile, ('P5','P6','P7')) - # When it comes to the variety of input formats, we do something - # rather rude. Observe that L, LA, RGB, RGBA are the 4 colour - # types supported by PNG and that they correspond to 1, 2, 3, 4 - # channels respectively. So we use the number of channels in - # the source image to determine which one we have. We do not - # care about TUPLTYPE. - greyscale = depth <= 2 - pamalpha = depth in (2,4) - supported = map(lambda x: 2**x-1, range(1,17)) - try: - mi = supported.index(maxval) - except ValueError: - raise NotImplementedError( - 'your maxval (%s) not in supported list %s' % - (maxval, str(supported))) - bitdepth = mi+1 - writer = Writer(width, height, - greyscale=greyscale, - bitdepth=bitdepth, - interlace=options.interlace, - transparent=options.transparent, - background=options.background, - alpha=bool(pamalpha or options.alpha), - gamma=options.gamma, - compression=options.compression) - if options.alpha: - pgmfile = open(options.alpha, 'rb') - format, awidth, aheight, adepth, amaxval = \ - read_pnm_header(pgmfile, 'P5') - if amaxval != '255': - raise NotImplementedError( - 'maxval %s not supported for alpha channel' % amaxval) - if (awidth, aheight) != (width, height): - raise ValueError("alpha channel image size mismatch" - " (%s has %sx%s but %s has %sx%s)" - % (infilename, width, height, - options.alpha, awidth, aheight)) - writer.convert_ppm_and_pgm(infile, pgmfile, outfile) - else: - writer.convert_pnm(infile, outfile) - - -if __name__ == '__main__': - try: - _main(sys.argv) - except Error as e: - print(e, file=sys.stderr) diff --git a/micropsi_core/world/island/resources/groundmaps/insel.png b/micropsi_core/world/island/resources/groundmaps/insel.png deleted file mode 100644 index dad05ae9..00000000 Binary files 
a/micropsi_core/world/island/resources/groundmaps/insel.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/groundmaps/mars.png b/micropsi_core/world/island/resources/groundmaps/mars.png deleted file mode 100644 index 14cb4b80..00000000 Binary files a/micropsi_core/world/island/resources/groundmaps/mars.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/groundmaps/psi_1.png b/micropsi_core/world/island/resources/groundmaps/psi_1.png deleted file mode 100644 index 8c5aaed7..00000000 Binary files a/micropsi_core/world/island/resources/groundmaps/psi_1.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/groundmaps/psi_emo.png b/micropsi_core/world/island/resources/groundmaps/psi_emo.png deleted file mode 100644 index 279f2857..00000000 Binary files a/micropsi_core/world/island/resources/groundmaps/psi_emo.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/island.json b/micropsi_core/world/island/resources/island.json deleted file mode 100644 index 9e26dfee..00000000 --- a/micropsi_core/world/island/resources/island.json +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/micropsi_core/world/island/resources/objectimages/Lamp.png b/micropsi_core/world/island/resources/objectimages/Lamp.png deleted file mode 100644 index a2883e8e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/Lamp.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/boletus-edulis.png b/micropsi_core/world/island/resources/objectimages/boletus-edulis.png deleted file mode 100644 index e937b5d7..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/boletus-edulis.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/boulder.png b/micropsi_core/world/island/resources/objectimages/boulder.png deleted file mode 100644 index 79c60c6a..00000000 Binary files 
a/micropsi_core/world/island/resources/objectimages/boulder.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braintree.png b/micropsi_core/world/island/resources/objectimages/braintree.png deleted file mode 100644 index f7371c27..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braintree.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_0.png b/micropsi_core/world/island/resources/objectimages/braitenberg_0.png deleted file mode 100644 index c3737adf..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_0.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_112.png b/micropsi_core/world/island/resources/objectimages/braitenberg_112.png deleted file mode 100644 index a1456c3b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_112.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_135.png b/micropsi_core/world/island/resources/objectimages/braitenberg_135.png deleted file mode 100644 index 202c5243..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_135.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_157.png b/micropsi_core/world/island/resources/objectimages/braitenberg_157.png deleted file mode 100644 index fd21f838..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_157.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_180.png b/micropsi_core/world/island/resources/objectimages/braitenberg_180.png deleted file mode 100644 index b682af94..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_180.png and /dev/null differ diff --git 
a/micropsi_core/world/island/resources/objectimages/braitenberg_202.png b/micropsi_core/world/island/resources/objectimages/braitenberg_202.png deleted file mode 100644 index 38fb8c5c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_202.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_22.png b/micropsi_core/world/island/resources/objectimages/braitenberg_22.png deleted file mode 100644 index 8d84d1ed..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_22.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_225.png b/micropsi_core/world/island/resources/objectimages/braitenberg_225.png deleted file mode 100644 index 95cdef5b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_225.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_247.png b/micropsi_core/world/island/resources/objectimages/braitenberg_247.png deleted file mode 100644 index 8b39aedf..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_247.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_270.png b/micropsi_core/world/island/resources/objectimages/braitenberg_270.png deleted file mode 100644 index 88af08a7..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_270.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_292.png b/micropsi_core/world/island/resources/objectimages/braitenberg_292.png deleted file mode 100644 index 1aceae0b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_292.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_315.png 
b/micropsi_core/world/island/resources/objectimages/braitenberg_315.png deleted file mode 100644 index 086d6330..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_315.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_337.png b/micropsi_core/world/island/resources/objectimages/braitenberg_337.png deleted file mode 100644 index c01fae54..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_337.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_45.png b/micropsi_core/world/island/resources/objectimages/braitenberg_45.png deleted file mode 100644 index 2275fbc8..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_45.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_67.png b/micropsi_core/world/island/resources/objectimages/braitenberg_67.png deleted file mode 100644 index a594ccff..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_67.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/braitenberg_90.png b/micropsi_core/world/island/resources/objectimages/braitenberg_90.png deleted file mode 100644 index 8d337cee..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/braitenberg_90.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/cubensis.png b/micropsi_core/world/island/resources/objectimages/cubensis.png deleted file mode 100644 index 6d9115b6..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/cubensis.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/fly-agaris.png b/micropsi_core/world/island/resources/objectimages/fly-agaris.png deleted file mode 100644 index 316d0aaf..00000000 Binary files 
a/micropsi_core/world/island/resources/objectimages/fly-agaris.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/juniper-berries.png b/micropsi_core/world/island/resources/objectimages/juniper-berries.png deleted file mode 100644 index 32bfbde9..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/juniper-berries.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/maple.png b/micropsi_core/world/island/resources/objectimages/maple.png deleted file mode 100644 index 0cb11e9c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/maple.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/menhir.png b/micropsi_core/world/island/resources/objectimages/menhir.png deleted file mode 100644 index 84631ea4..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/menhir.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/palm-tree.png b/micropsi_core/world/island/resources/objectimages/palm-tree.png deleted file mode 100644 index a1f261ff..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/palm-tree.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/rock.png b/micropsi_core/world/island/resources/objectimages/rock.png deleted file mode 100644 index db15b930..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/rock.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_0.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_0.png deleted file mode 100644 index e774a826..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_0.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_112.png 
b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_112.png deleted file mode 100644 index a9b8b158..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_112.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_135.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_135.png deleted file mode 100644 index c82dadab..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_135.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_157.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_157.png deleted file mode 100644 index 68092ee2..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_157.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_180.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_180.png deleted file mode 100644 index eb3fbb9e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_180.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_202.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_202.png deleted file mode 100644 index 406f9380..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_202.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_22.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_22.png deleted file mode 100644 index cd83c0cc..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_22.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_225.png 
b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_225.png deleted file mode 100644 index bca8c988..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_225.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_247.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_247.png deleted file mode 100644 index c20368c1..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_247.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_270.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_270.png deleted file mode 100644 index 2be379fc..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_270.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_292.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_292.png deleted file mode 100644 index 9bdad1e0..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_292.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_315.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_315.png deleted file mode 100644 index 7006581a..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_315.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_337.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_337.png deleted file mode 100644 index 366b0c24..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_337.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_45.png 
b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_45.png deleted file mode 100644 index a2526724..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_45.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_67.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_67.png deleted file mode 100644 index 0b677a1b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_67.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_90.png b/micropsi_core/world/island/resources/objectimages/unused/Micropsi_90.png deleted file mode 100644 index a557ff6f..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Micropsi_90.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/Mouselamp.png b/micropsi_core/world/island/resources/objectimages/unused/Mouselamp.png deleted file mode 100644 index d4e6916a..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/Mouselamp.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/agent_small.png b/micropsi_core/world/island/resources/objectimages/unused/agent_small.png deleted file mode 100644 index 5fca212d..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/agent_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/banana.png b/micropsi_core/world/island/resources/objectimages/unused/banana.png deleted file mode 100644 index ec57774b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/banana.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/banana_small.png b/micropsi_core/world/island/resources/objectimages/unused/banana_small.png deleted file mode 
100644 index 646ec9f3..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/banana_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/battery.png b/micropsi_core/world/island/resources/objectimages/unused/battery.png deleted file mode 100644 index 11cea013..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/battery.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/boulder.png b/micropsi_core/world/island/resources/objectimages/unused/boulder.png deleted file mode 100644 index d71a132c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/boulder.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/fliegenpilz.png b/micropsi_core/world/island/resources/objectimages/unused/fliegenpilz.png deleted file mode 100644 index 819716c3..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/fliegenpilz.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/fungus_small.png b/micropsi_core/world/island/resources/objectimages/unused/fungus_small.png deleted file mode 100644 index 329be24f..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/fungus_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/hackklotz.png b/micropsi_core/world/island/resources/objectimages/unused/hackklotz.png deleted file mode 100644 index 8e816697..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/hackklotz.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/lamp_small.png b/micropsi_core/world/island/resources/objectimages/unused/lamp_small.png deleted file mode 100644 index 39b4afc7..00000000 Binary files 
a/micropsi_core/world/island/resources/objectimages/unused/lamp_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_0.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_0.png deleted file mode 100644 index 4db71a97..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_0.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_112.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_112.png deleted file mode 100644 index 22081796..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_112.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_135.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_135.png deleted file mode 100644 index 68329609..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_135.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_157.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_157.png deleted file mode 100644 index f64137b6..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_157.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_180.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_180.png deleted file mode 100644 index 2a90634c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_180.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_202.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_202.png deleted file mode 100644 index 6ed1a372..00000000 Binary files 
a/micropsi_core/world/island/resources/objectimages/unused/marsagent_202.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_22.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_22.png deleted file mode 100644 index 9a10d904..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_22.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_225.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_225.png deleted file mode 100644 index 225d1d7b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_225.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_247.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_247.png deleted file mode 100644 index 48191933..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_247.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_270.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_270.png deleted file mode 100644 index 09689204..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_270.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_292.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_292.png deleted file mode 100644 index dd931877..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_292.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_315.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_315.png deleted file mode 100644 index 8a59c148..00000000 Binary files 
a/micropsi_core/world/island/resources/objectimages/unused/marsagent_315.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_337.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_337.png deleted file mode 100644 index e72431c9..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_337.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_45.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_45.png deleted file mode 100644 index 7679f265..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_45.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_67.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_67.png deleted file mode 100644 index 35b1a3bc..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_67.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/marsagent_90.png b/micropsi_core/world/island/resources/objectimages/unused/marsagent_90.png deleted file mode 100644 index 59cedbc7..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/marsagent_90.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/mushroom_small.png b/micropsi_core/world/island/resources/objectimages/unused/mushroom_small.png deleted file mode 100644 index 26c64358..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/mushroom_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/nettle.png b/micropsi_core/world/island/resources/objectimages/unused/nettle.png deleted file mode 100644 index 79071ccb..00000000 Binary files 
a/micropsi_core/world/island/resources/objectimages/unused/nettle.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni0.png b/micropsi_core/world/island/resources/objectimages/unused/omni0.png deleted file mode 100644 index 0b5b08ed..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni0.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni11.png b/micropsi_core/world/island/resources/objectimages/unused/omni11.png deleted file mode 100644 index fd7124cf..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni11.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni110.png b/micropsi_core/world/island/resources/objectimages/unused/omni110.png deleted file mode 100644 index ec86e1fe..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni110.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni120.png b/micropsi_core/world/island/resources/objectimages/unused/omni120.png deleted file mode 100644 index 0b5b08ed..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni120.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni131.png b/micropsi_core/world/island/resources/objectimages/unused/omni131.png deleted file mode 100644 index fd7124cf..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni131.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni142.png b/micropsi_core/world/island/resources/objectimages/unused/omni142.png deleted file mode 100644 index 50bf7913..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni142.png and /dev/null differ diff --git 
a/micropsi_core/world/island/resources/objectimages/unused/omni153.png b/micropsi_core/world/island/resources/objectimages/unused/omni153.png deleted file mode 100644 index 786cf30b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni153.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni164.png b/micropsi_core/world/island/resources/objectimages/unused/omni164.png deleted file mode 100644 index 1832785c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni164.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni175.png b/micropsi_core/world/island/resources/objectimages/unused/omni175.png deleted file mode 100644 index 6f1c5a9b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni175.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni186.png b/micropsi_core/world/island/resources/objectimages/unused/omni186.png deleted file mode 100644 index 88d2d335..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni186.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni197.png b/micropsi_core/world/island/resources/objectimages/unused/omni197.png deleted file mode 100644 index b7a6f13e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni197.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni208.png b/micropsi_core/world/island/resources/objectimages/unused/omni208.png deleted file mode 100644 index fcb90737..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni208.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni219.png b/micropsi_core/world/island/resources/objectimages/unused/omni219.png deleted 
file mode 100644 index 37838dd2..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni219.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni22.png b/micropsi_core/world/island/resources/objectimages/unused/omni22.png deleted file mode 100644 index 50bf7913..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni22.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni230.png b/micropsi_core/world/island/resources/objectimages/unused/omni230.png deleted file mode 100644 index ec86e1fe..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni230.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni240.png b/micropsi_core/world/island/resources/objectimages/unused/omni240.png deleted file mode 100644 index 0b5b08ed..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni240.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni251.png b/micropsi_core/world/island/resources/objectimages/unused/omni251.png deleted file mode 100644 index fd7124cf..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni251.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni262.png b/micropsi_core/world/island/resources/objectimages/unused/omni262.png deleted file mode 100644 index 50bf7913..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni262.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni273.png b/micropsi_core/world/island/resources/objectimages/unused/omni273.png deleted file mode 100644 index 786cf30b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni273.png and /dev/null differ diff 
--git a/micropsi_core/world/island/resources/objectimages/unused/omni284.png b/micropsi_core/world/island/resources/objectimages/unused/omni284.png deleted file mode 100644 index 1832785c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni284.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni295.png b/micropsi_core/world/island/resources/objectimages/unused/omni295.png deleted file mode 100644 index 6f1c5a9b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni295.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni306.png b/micropsi_core/world/island/resources/objectimages/unused/omni306.png deleted file mode 100644 index 88d2d335..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni306.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni317.png b/micropsi_core/world/island/resources/objectimages/unused/omni317.png deleted file mode 100644 index b7a6f13e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni317.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni328.png b/micropsi_core/world/island/resources/objectimages/unused/omni328.png deleted file mode 100644 index fcb90737..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni328.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni33.png b/micropsi_core/world/island/resources/objectimages/unused/omni33.png deleted file mode 100644 index 786cf30b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni33.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni339.png b/micropsi_core/world/island/resources/objectimages/unused/omni339.png 
deleted file mode 100644 index 37838dd2..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni339.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni350.png b/micropsi_core/world/island/resources/objectimages/unused/omni350.png deleted file mode 100644 index ec86e1fe..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni350.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni44.png b/micropsi_core/world/island/resources/objectimages/unused/omni44.png deleted file mode 100644 index 1832785c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni44.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni55.png b/micropsi_core/world/island/resources/objectimages/unused/omni55.png deleted file mode 100644 index 6f1c5a9b..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni55.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni66.png b/micropsi_core/world/island/resources/objectimages/unused/omni66.png deleted file mode 100644 index 88d2d335..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni66.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni77.png b/micropsi_core/world/island/resources/objectimages/unused/omni77.png deleted file mode 100644 index b7a6f13e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni77.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/omni88.png b/micropsi_core/world/island/resources/objectimages/unused/omni88.png deleted file mode 100644 index fcb90737..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni88.png and /dev/null differ diff --git 
a/micropsi_core/world/island/resources/objectimages/unused/omni99.png b/micropsi_core/world/island/resources/objectimages/unused/omni99.png deleted file mode 100644 index 37838dd2..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/omni99.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/ore_gold.png b/micropsi_core/world/island/resources/objectimages/unused/ore_gold.png deleted file mode 100644 index 062e20da..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/ore_gold.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/ore_poisoned.png b/micropsi_core/world/island/resources/objectimages/unused/ore_poisoned.png deleted file mode 100644 index 7595e037..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/ore_poisoned.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/ouruglypalmtree.png b/micropsi_core/world/island/resources/objectimages/unused/ouruglypalmtree.png deleted file mode 100644 index 77b1aac0..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/ouruglypalmtree.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/pinetree.png b/micropsi_core/world/island/resources/objectimages/unused/pinetree.png deleted file mode 100644 index 1bd1bddb..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/pinetree.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/repairdroid.png b/micropsi_core/world/island/resources/objectimages/unused/repairdroid.png deleted file mode 100644 index 00fcee0e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/repairdroid.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/rock.png 
b/micropsi_core/world/island/resources/objectimages/unused/rock.png deleted file mode 100644 index d80c3c0c..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/rock.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/sagaagent.png b/micropsi_core/world/island/resources/objectimages/unused/sagaagent.png deleted file mode 100644 index 1076f83d..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/sagaagent.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/shockpodbush_small.png b/micropsi_core/world/island/resources/objectimages/unused/shockpodbush_small.png deleted file mode 100644 index a18fe1e6..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/shockpodbush_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/shroom1.png b/micropsi_core/world/island/resources/objectimages/unused/shroom1.png deleted file mode 100644 index 94edcf52..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/shroom1.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/shroom2.png b/micropsi_core/world/island/resources/objectimages/unused/shroom2.png deleted file mode 100644 index 57948efc..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/shroom2.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/smallflower.png b/micropsi_core/world/island/resources/objectimages/unused/smallflower.png deleted file mode 100644 index af3f9c04..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/smallflower.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/soilentgreen.png b/micropsi_core/world/island/resources/objectimages/unused/soilentgreen.png deleted file mode 100644 
index 88fa3cc8..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/soilentgreen.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/solarstation.png b/micropsi_core/world/island/resources/objectimages/unused/solarstation.png deleted file mode 100644 index 6fe9255e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/solarstation.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/stone1.png b/micropsi_core/world/island/resources/objectimages/unused/stone1.png deleted file mode 100644 index 52087e02..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/stone1.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/stone2.png b/micropsi_core/world/island/resources/objectimages/unused/stone2.png deleted file mode 100644 index 8214dce6..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/stone2.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/sunflower.png b/micropsi_core/world/island/resources/objectimages/unused/sunflower.png deleted file mode 100644 index 565a0d50..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/sunflower.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/thornbush_small.png b/micropsi_core/world/island/resources/objectimages/unused/thornbush_small.png deleted file mode 100644 index cedcaa5e..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/thornbush_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/tree.png b/micropsi_core/world/island/resources/objectimages/unused/tree.png deleted file mode 100644 index 94914e19..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/tree.png and 
/dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/tree_small.png b/micropsi_core/world/island/resources/objectimages/unused/tree_small.png deleted file mode 100644 index acaa865d..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/tree_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/treestump.png b/micropsi_core/world/island/resources/objectimages/unused/treestump.png deleted file mode 100644 index 237727fe..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/treestump.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/uglyuglytree.png b/micropsi_core/world/island/resources/objectimages/unused/uglyuglytree.png deleted file mode 100644 index 0a8891c6..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/uglyuglytree.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/unknown.png b/micropsi_core/world/island/resources/objectimages/unused/unknown.png deleted file mode 100644 index 4bd84ecf..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/unknown.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/unknown_small.png b/micropsi_core/world/island/resources/objectimages/unused/unknown_small.png deleted file mode 100644 index 3251c518..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/unknown_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/unknownbox.png b/micropsi_core/world/island/resources/objectimages/unused/unknownbox.png deleted file mode 100644 index 9bb9263d..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/unknownbox.png and /dev/null differ diff --git 
a/micropsi_core/world/island/resources/objectimages/unused/waterempty.png b/micropsi_core/world/island/resources/objectimages/unused/waterempty.png deleted file mode 100644 index 5435ef15..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/waterempty.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/waterfull.png b/micropsi_core/world/island/resources/objectimages/unused/waterfull.png deleted file mode 100644 index 97d0fee1..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/waterfull.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/waterhole_small.png b/micropsi_core/world/island/resources/objectimages/unused/waterhole_small.png deleted file mode 100644 index 026e4521..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/waterhole_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/waterholeempty_small.png b/micropsi_core/world/island/resources/objectimages/unused/waterholeempty_small.png deleted file mode 100644 index 7c1b7046..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/waterholeempty_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/unused/wirselkraut_small.png b/micropsi_core/world/island/resources/objectimages/unused/wirselkraut_small.png deleted file mode 100644 index 6b69c66f..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/unused/wirselkraut_small.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/well.png b/micropsi_core/world/island/resources/objectimages/well.png deleted file mode 100644 index 2d2d2d20..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/well.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/objectimages/wirselkraut.png 
b/micropsi_core/world/island/resources/objectimages/wirselkraut.png deleted file mode 100644 index 89cd5c24..00000000 Binary files a/micropsi_core/world/island/resources/objectimages/wirselkraut.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/worldimages/doernerisland.png b/micropsi_core/world/island/resources/worldimages/doernerisland.png deleted file mode 100644 index ca0557e1..00000000 Binary files a/micropsi_core/world/island/resources/worldimages/doernerisland.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/worldimages/insel.png b/micropsi_core/world/island/resources/worldimages/insel.png deleted file mode 100644 index afeb9ff2..00000000 Binary files a/micropsi_core/world/island/resources/worldimages/insel.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/worldimages/mars.png b/micropsi_core/world/island/resources/worldimages/mars.png deleted file mode 100644 index 46e46cf2..00000000 Binary files a/micropsi_core/world/island/resources/worldimages/mars.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/worldimages/miniisland.png b/micropsi_core/world/island/resources/worldimages/miniisland.png deleted file mode 100644 index c086131d..00000000 Binary files a/micropsi_core/world/island/resources/worldimages/miniisland.png and /dev/null differ diff --git a/micropsi_core/world/island/resources/worldimages/psi_emo.png b/micropsi_core/world/island/resources/worldimages/psi_emo.png deleted file mode 100644 index 16c6212b..00000000 Binary files a/micropsi_core/world/island/resources/worldimages/psi_emo.png and /dev/null differ diff --git a/micropsi_core/world/island/structured_objects/__init__.py b/micropsi_core/world/island/structured_objects/__init__.py deleted file mode 100644 index 3647b333..00000000 --- a/micropsi_core/world/island/structured_objects/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = 'rvuine' diff --git 
a/micropsi_core/world/island/structured_objects/objects.py b/micropsi_core/world/island/structured_objects/objects.py deleted file mode 100644 index f40fee53..00000000 --- a/micropsi_core/world/island/structured_objects/objects.py +++ /dev/null @@ -1,152 +0,0 @@ -__author__ = 'rvuine' - -from micropsi_core.world.worldobject import WorldObject - - -class Shape(): - - def __init__(self, type, color): - self.type = type - self.color = color - -VER_B = Shape("ver", "brown") -COM_G = Shape("com", "green") -CIR_P = Shape("cir", "purple") -CIR_B = Shape("cir", "brown") -CIR_R = Shape("cir", "red") -COM_C = Shape("com", "charcoal") -COM_N = Shape("com", "navy") -CIR_W = Shape("cir", "white") - -OBJECTS = { - "Lightsource": { - "type": "Lightsource", - "shape_grid": [ - [None, None, CIR_W, None, None], - [None, None, VER_B, None, None], - [None, None, VER_B, None, None], - [None, None, VER_B, None, None], - [None, None, VER_B, None, None] - ] - }, - "PalmTree": { - "type": "PalmTree", - "shape_grid": [ - [None, None, COM_G, None, None], # 0/-2 - [None, COM_G, VER_B, COM_G, None], # -1/-1 0/-1 1/-1 - [None, None, VER_B, None, None], # 0/ 0 - [None, None, VER_B, None, None], # 0/ 1 - [None, None, VER_B, None, None] # 0/ 2 - ] - }, - "Maple": { - "type": "Maple", - "shape_grid": [ - [None, None, COM_G, None, None], - [None, COM_G, COM_G, COM_G, None], - [None, COM_G, VER_B, COM_G, None], - [None, None, VER_B, None, None], - [None, None, VER_B, None, None] - ] - }, - "Braintree": { - "type": "Braintree", - "shape_grid": [ - [None, None, None, None, None], - [None, COM_G, COM_G, COM_G, None], - [None, COM_G, COM_G, COM_G, None], - [None, VER_B, COM_G, None, None], - [None, None, VER_B, None, None] - ] - }, - "Wirselkraut": { - "type": "Wirselkraut", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, COM_G, None, None] - ] - }, - "Thornbush": { - "type": 
"Thornbush", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, COM_G, None, None], - [None, COM_G, COM_G, COM_G, None] - ] - }, - "Juniper": { - "type": "Juniper", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, CIR_P, None, None], - [None, CIR_P, COM_G, CIR_P, None], - [None, COM_G, COM_G, COM_G, None] - ] - }, - "Champignon": { - "type": "Champignon", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, CIR_B, None, None] - ] - }, - "FlyAgaric": { - "type": "FlyAgaric", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, CIR_R, None, None] - ] - }, - "Stone": { - "type": "Stone", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, COM_C, None, None] - ] - }, - "Boulder": { - "type": "Boulder", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, COM_G, COM_C, None], - [None, COM_C, COM_C, COM_C, None] - ] - }, - "Menhir": { - "type": "Menhir", - "shape_grid": [ - [None, None, None, None, None], - [None, None, COM_C, None, None], - [None, None, COM_C, None, None], - [None, None, COM_C, None, None], - [None, None, COM_C, COM_C, None] - ] - }, - "Waterhole": { - "type": "Waterhole", - "shape_grid": [ - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, None, None, None, None], - [None, COM_N, COM_N, COM_N, COM_N] - ] - } -} diff --git a/micropsi_core/world/island/structured_objects/scene.py b/micropsi_core/world/island/structured_objects/scene.py 
deleted file mode 100644 index 68a5b3e2..00000000 --- a/micropsi_core/world/island/structured_objects/scene.py +++ /dev/null @@ -1,130 +0,0 @@ -__author__ = 'rvuine' - -GRIDSIZE = 5 -HALFGRID = int((GRIDSIZE-1) / 2) - - -class Scene(): - - @property - def fovea_x(self): - return self.__fovea_x - - @property - def fovea_y(self): - return self.__fovea_y - - def __init__(self, world, agent_id): - self.__fovea_x = 0 - self.__fovea_y = 0 - - self.__shape_grid = [[0] * GRIDSIZE for i in range(GRIDSIZE)] - self.__shape_name = "none" - - self.__world = world - self.__agent_id = agent_id - - def reset_fovea(self): - """ - Resets the fovea to the center of the grid - """ - self.__fovea_x = 0 - self.__fovea_y = 0 - self.__update_world_data() - - def move_fovea_x(self, x): - """ - Horizontally moves the fovea by x elements on the grid - """ - self.__fovea_x += int(x) - if self.__fovea_x > HALFGRID: - self.__fovea_x = HALFGRID - if self.__fovea_x < -HALFGRID: - self.__fovea_x = -HALFGRID - self.__update_world_data() - - def move_fovea_y(self, y): - """ - Vertically moves the fovea by y elements on the grid - """ - self.__fovea_y += int(y) - if self.__fovea_y > HALFGRID: - self.__fovea_y = HALFGRID - if self.__fovea_y < -HALFGRID: - self.__fovea_y = -HALFGRID - self.__update_world_data() - - def load_object(self, shape_name, shape_grid): - """ - Loads an object into the grid. 
- shape_grid is a two-dimensional array of size GRIDSIZExGRIDSIZE, containing None or Shape objects - """ - self.__shape_grid = shape_grid - self.__shape_name = shape_name - self.__update_world_data() - - def is_fovea_on_shape_type(self, shapetype): - """ - Returns true if the shape unter the fovea is of type shapetype - """ - return ((-HALFGRID <= self.__fovea_x <= HALFGRID) and - (-HALFGRID <= self.__fovea_y <= HALFGRID) and - (self.__shape_grid[int(self.__fovea_y+HALFGRID)][int(self.__fovea_x+HALFGRID)] is not None) and - (self.__shape_grid[int(self.__fovea_y+HALFGRID)][int(self.__fovea_x+HALFGRID)].type is shapetype)) - - def is_fovea_on_shape_color(self, shapecolor): - """ - Returns true if the shape unter the fovea is shapecolor-colored - """ - return ((-HALFGRID <= self.__fovea_x <= HALFGRID) and - (-HALFGRID <= self.__fovea_y <= HALFGRID) and - (self.__shape_grid[int(self.__fovea_y+HALFGRID)][int(self.__fovea_x+HALFGRID)] is not None) and - (self.__shape_grid[int(self.__fovea_y+HALFGRID)][int(self.__fovea_x+HALFGRID)].color is shapecolor)) - - def is_shapetype_in_scene(self, shapetype): - """ - Returns true if a shape of type shapetype is in the scene (ignoring fovea position) - """ - for shapeline in self.__shape_grid: - for shape in shapeline: - if shape is not None and shape.type is shapetype: - return True - return False - - def is_shapecolor_in_scene(self, shapecolor): - """ - Returns true if a shapecolor-colored shape is in the scene (ignoring fovea position) - """ - for shapeline in self.__shape_grid: - for shape in shapeline: - if shape is not None and shape.color is shapecolor: - return True - return False - - def __update_world_data(self): - """ - Updates the world's data object with scene information - """ - if self.__world.data['agents'] is None: - self.__world.data['agents'] = {} - if self.__world.data['agents'][self.__agent_id] is None: - self.__world.data['agents'][self.__agent_id] = {} - self.__world.data['agents'][self.__agent_id]['scene'] 
= self.__serialize() - - def __serialize(self): - """ - Serializes the scene into a dict, containing the shape grid array - """ - shape_grid = [[None] * GRIDSIZE for i in range(GRIDSIZE)] - for line in range(GRIDSIZE): - for column in range(GRIDSIZE): - if self.__shape_grid[line][column] is not None: - shape_grid[line][column] = {"type": self.__shape_grid[line][column].type, - "color": self.__shape_grid[line][column].color} - return { - "type": "structured_object", - "shape_name": self.__shape_name, - "shape_grid": shape_grid, - "fovea_x": self.fovea_x, - "fovea_y": self.fovea_y - } diff --git a/micropsi_core/world/island/structured_objects/structured_objects.py b/micropsi_core/world/island/structured_objects/structured_objects.py deleted file mode 100644 index 7159bea1..00000000 --- a/micropsi_core/world/island/structured_objects/structured_objects.py +++ /dev/null @@ -1,100 +0,0 @@ -__author__ = 'rvuine' - -import logging -from micropsi_core.world.island import island -from micropsi_core.world.island.structured_objects.objects import * -from micropsi_core.world.island.structured_objects.scene import Scene -from micropsi_core.world.worldadapter import WorldAdapter - - -class StructuredObjects(WorldAdapter): - """A world adapter exposing objects composed of basic shapes and colors to the agent""" - - def __init__(self, world, uid=None, **data): - super(StructuredObjects, self).__init__(world, uid, **data) - - self.datasources = {'fov-x': 0, 'fov-y': 0, 'major-newscene': 0} - self.datatargets = {'fov_x': 0, 'fov_y': 0, 'fov_reset': 0} - - self.shapetypes = [] - self.shapecolors = [] - - for key, objecttype in OBJECTS.items(): - for shapeline in objecttype['shape_grid']: - for shape in shapeline: - if shape is not None and shape.type not in self.shapetypes: - self.shapetypes.append(shape.type) - if shape is not None and shape.color not in self.shapecolors: - self.shapecolors.append(shape.color) - - for shapetype in self.shapetypes: - self.datasources['fovea-' + 
shapetype] = 0 - self.datasources['presence-' + shapetype] = 0 - - for shapecolor in self.shapecolors: - self.datasources["fovea-" + shapecolor] = 0 - self.datasources["presence-" + shapecolor] = 0 - - self.currentobject = None - self.scene = None - - self.scene = Scene(world, uid) - self.scene.load_object("PalmTree", OBJECTS["PalmTree"]["shape_grid"]) - - def initialize_worldobject(self, data): - if "position" not in data: - self.position = self.world.groundmap['start_position'] - - def get_datasource(self, key): - """ - allows the agent to read a value from a datasource. - overrides default to make sure newscene signals are picked up by the node net - """ - if key == "major-newscene": - if self.datasource_snapshots[key] == 1: - self.datasources[key] = 0 - return 1 - else: - return WorldAdapter.get_datasource(self, key) - - def update_data_sources_and_targets(self): - """called on every world calculation step to advance the life of the agent""" - - # we don't move, for now - self.position = self.world.get_movement_result(self.position, (0, 0)) - - #find nearest object to load into the scene - lowest_distance_to_worldobject = float("inf") - nearest_worldobject = None - for key, worldobject in self.world.objects.items(): - # TODO: use a proper 2D geometry library - distance = island._2d_distance_squared(self.position, worldobject.position) - if distance < lowest_distance_to_worldobject: - lowest_distance_to_worldobject = distance - nearest_worldobject = worldobject - - if self.currentobject is not nearest_worldobject and nearest_worldobject.structured_object_type is not None: - self.currentobject = nearest_worldobject - self.scene.load_object(self.currentobject.structured_object_type, - OBJECTS[self.currentobject.structured_object_type]['shape_grid']) - self.datasources["major-newscene"] = 1 - logging.getLogger("agent.%s" % self.uid).debug("StructuredObjects WA selected new scene: %s", - self.currentobject.structured_object_type) - - #manage the scene - if 
self.datatargets['fov_reset'] > 0: - self.scene.reset_fovea() - - self.scene.move_fovea_x(self.datatargets['fov_x']) - self.scene.move_fovea_y(self.datatargets['fov_y']) - - self.datasources["fov-x"] = self.scene.fovea_x - self.datasources["fov-y"] = self.scene.fovea_y - - for shapetype in self.shapetypes: - self.datasources["fovea-"+shapetype] = 1 if self.scene.is_fovea_on_shape_type(shapetype) else 0 - self.datasources["presence-"+shapetype] = 1 if self.scene.is_shapetype_in_scene(shapetype) else 0 - - for shapecolor in self.shapecolors: - self.datasources["fovea-"+shapecolor] = 1 if self.scene.is_fovea_on_shape_color(shapecolor) else 0 - self.datasources["presence-"+shapecolor] = 1 if self.scene.is_shapecolor_in_scene(shapecolor) else 0 diff --git a/micropsi_core/world/minecraft/README.md b/micropsi_core/world/minecraft/README.md deleted file mode 100644 index 394aa76b..00000000 --- a/micropsi_core/world/minecraft/README.md +++ /dev/null @@ -1,105 +0,0 @@ -about Minecraft world ------ -Refresh-rate of the 3d visualization is directly bound to the world update cycle. Set it to somewhere below 110ms to get a smooth visualization. * - -ReadMe for Running a Minecraft experiment under OS X (other OSes should also be fine, just make sure to install latest pylget (from repo) and pycrypto) - -1. install http://brew.sh/ - -2. brew install python3 - -2.5.* brew install mercurial - -[3. pip3 install pycrypto (might not be necessary anymore because new makefile installs it)] - -4. pip3 install hg+https://pyglet.googlecode.com/hg/ (only for visualisation branch) * - -5. git clone https://github.com/joschabach/micropsi2/ - -5.5. make - -6. git clone https://github.com/jonasrk/minecraft_servers - -7. git clone https://github.com/jonasrk/MicroPsi-2-Minecraft-Experiment [outdated!] - -8. edit config.ini in a way, that it points to the data directory from point 7 - -9. Server: ./minecraft_servers/1.7.4/start.command - -10. Micropsi ./run.sh - -11. 
http://localhost:6543/ (you may need to login as admin/admin) - -12. Select Minecraft World and Nodenet - -13. Press "Play" next to the world. (Wait until you see something. *) - -14. Press "Play" next to the nodenet. The minecraft bot will hopefully move. - - -\* Only needed for visualisation which is not part of the current master. - - - -# Known working Minecraft experiments: -## With Pyglet Visualisation, 1.5.2 Server, Diamond finding experiment - -* https://github.com/joschabach/micropsi2/tree/minecraft_with_visualisation - -commit b62a506dc42dafc8bb661e5af59073833eaa4cc8 - -* https://github.com/jonasrk/minecraft_servers - -commit 75e2cf65ba38efce8b8328106201df94a9e1a3ae -cd 'minecraft server 1.5.2'; ./start.command - -* https://github.com/jonasrk/MicroPsi-2-Minecraft-Experiment - -commit 21e114f72364bfd4818debf1be28959b6424255a -./micropsi2_data - - -## Without Visualisation, 1.7.4 Server, Diamond finding experiment with jumping and gravity - -* https://github.com/joschabach/micropsi2/tree/master - -commit 0bf90d110ab24f22ccb07dc938cec6484475ac81 - -* https://github.com/jonasrk/minecraft_servers - -commit e3c7ea937e027f1d87d5036ef07a3e73124fd8e8 -cd 1.7.4; ./start.command - -* https://github.com/jonasrk/MicroPsi-2-Minecraft-Experiment -commit 9d7cb90f7420850afe13735e094501b9b72830a5 -./micropsi2_data/micropsi2_data_for_new_spock - -## Without Visualisation, 1.7.4 Server, Ground Types experiment with jumping and gravity - -* https://github.com/joschabach/micropsi2/tree/master - -commit ae8d9a378aa6a927ac923db2c0fca8267262da1c - -* https://github.com/jonasrk/minecraft_servers - -commit 53ac1b904e0ac7f85a36ef97f19123c1bc9159ed -cd 1.7.4; ./start.command - -* https://github.com/jonasrk/MicroPsi-2-Minecraft-Experiment -commit cc98943adbf3e82b99899e8ed6d3f2cbfb925993 -./micropsi2_data/micropsi2_data_for_new_spock - -## Without Visualisation, 1.7.4 Server, Obstacle experiment with jumping and gravity - -* https://github.com/joschabach/micropsi2/tree/master - -commit 
88ce9c205451b2dffb39a156fbbd305719c0f3f6 - -* https://github.com/jonasrk/minecraft_servers - -commit 268f9621bcfa95304600ed6fcbffb57674e0d153 -cd 1.7.4; ./start.command - -* https://github.com/jonasrk/MicroPsi-2-Minecraft-Experiment -commit 1fad259cd1043983f4af8c3facd9730d1d7603dd -./micropsi2_data/micropsi2_data_for_new_spock diff --git a/micropsi_core/world/minecraft/__init__.py b/micropsi_core/world/minecraft/__init__.py deleted file mode 100644 index d95ce28a..00000000 --- a/micropsi_core/world/minecraft/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = 'jonas' diff --git a/micropsi_core/world/minecraft/minecraft.py b/micropsi_core/world/minecraft/minecraft.py deleted file mode 100644 index 262e3f50..00000000 --- a/micropsi_core/world/minecraft/minecraft.py +++ /dev/null @@ -1,511 +0,0 @@ -from threading import Thread - -from spock import plugins as spockplugins -from spock.client import Client -from spock.plugins.core.event import EventPlugin -from spock.plugins.helpers.clientinfo import ClientInfoPlugin -from spock.plugins.helpers.move import MovementPlugin -from spock.plugins.helpers.reconnect import ReConnectPlugin -from spock.plugins.helpers.world import WorldPlugin - -from micropsi_core.world.world import World -from micropsi_core.world.worldadapter import WorldAdapter -from micropsi_core.world.minecraft.spockplugin import MicropsiPlugin -from micropsi_core.world.minecraft.minecraft_graph_locomotion import MinecraftGraphLocomotion -from micropsi_core.world.minecraft.minecraft_vision import MinecraftVision -from micropsi_core.world.minecraft.minecraft_histogram_vision import MinecraftHistogramVision - - -class Minecraft(World): - """ - A minecraft world. - Connects to a minecraft server and serves as a bridge between agent and server. 
See config.ini for configuration - """ - - supported_worldadapters = [ - 'MinecraftWorldAdapter', - 'MinecraftBraitenberg', - 'MinecraftGraphLocomotion', - 'MinecraftHistogramVision', - "MinecraftVision" - ] - - assets = { - 'template': 'minecraft/minecraft.tpl', - 'paperjs': 'minecraft/minecraft.js', - 'x': 256, - 'y': 256, - } - - def __init__(self, filename, world_type="Minecraft", name="", owner="", engine=None, uid=None, version=1, config={}): - """ - Initializes spock client including MicropsiPlugin, starts minecraft communication thread. - """ - from micropsi_core.runtime import add_signal_handler - - self.instances = { - 'spock': None, - 'thread': None - } - # do spock things first, then initialize micropsi world because the latter requires self.spockplugin - # register all necessary spock plugins - # DefaultPlugins contain EventPlugin, NetPlugin, TimerPlugin, AuthPlugin, - # ThreadPoolPlugin, StartPlugin and KeepalivePlugin - plugins = spockplugins.DefaultPlugins - plugins.append(ClientInfoPlugin) - plugins.append(MovementPlugin) - plugins.append(WorldPlugin) - plugins.append(MicropsiPlugin) - plugins.append(ReConnectPlugin) - - # get spock configs - settings = self.get_config() - - # add plugin-specific settings - settings['plugins'] = plugins - settings['plugin_settings'] = { - MicropsiPlugin: { - "micropsi_world": self - }, - EventPlugin: { - "killsignals": False - } - } - - # instantiate spock client if not yet done, which in turn instantiates its plugins - # ( MicropsiPlugin sets self.spockplugin upon instantiation ) - if self.instances['spock'] is None: - self.instances['spock'] = Client(plugins=plugins, settings=settings) - - if self.instances['thread'] is None: - # start new thread for minecraft comm" which starts spock client - thread = Thread( - target=self.instances['spock'].start, - args=(settings['server'], settings['port'])) - # Note: client.start() is attached in StartPlugin w/ setattr(self.client, 'start', self.start) - thread.start() - 
self.instances['thread'] = thread - # - add_signal_handler(self.kill_minecraft_thread) - - # once MicropsiPlugin is instantiated and running, initialize micropsi world - World.__init__(self, filename, world_type=world_type, name=name, owner=owner, uid=uid, version=version) - - # make data accessible to frontend - self.data['assets'] = self.assets - - # copied from jonas' code as is - self.current_step = 0 - self.first_step = True - self.chat_ping_counter = 0 - self.the_image = None - - def get_config(self): - """ - Collect config settings required by spock /minecraft as specified in - config.ini. - """ - from configuration import config as cfg - - settings = { - 'username': cfg['minecraft']['username'], - 'password': cfg['minecraft']['password'], - 'authenticated': True if cfg['minecraft']['authenticated'] == 'True' else False, - 'bufsize': 4096, # size of socket buffer - 'sock_quit': True, # stop bot on socket error or hangup - 'sess_quit': True, # stop bot on failed session login - 'thread_workers': 5, # number of workers in the thread pool - 'packet_trace': False, - 'mc_username': "test", - 'mc_password': "test", - 'server': cfg['minecraft']['server'], - 'port': int(cfg['minecraft']['port']) - } - return settings - - def kill_minecraft_thread(self, *args): - """ - """ - if hasattr(self, 'spockplugin'): - self.spockplugin.event.kill() - self.instances['thread'].join() - # self.spockplugin.threadpool.shutdown(False) - - def __del__(self): - from importlib import reload - self.kill_minecraft_thread() - reload(spockplugins) - - -class Minecraft2D(Minecraft): - """ A Minecraft world that offers a 2d visualization of the agent's perspective""" - - supported_worldadapters = [ - 'MinecraftWorldAdapter', - 'MinecraftGraphLocomotion' - ] - - assets = { - 'template': 'minecraft/minecraft.tpl', - 'paperjs': 'minecraft/minecraft2d.js', - } - - def step(self): - """ - Is called on every world step to advance the calculation. 
- """ - World.step(self) - - # a 2D perspective projection - self.get_perspective_projection(self.spockplugin.clientinfo.position) - - def get_world_view(self, step): - """ returns a list of world objects, and the current step of the calculation """ - return { - 'objects': self.get_world_objects(), - 'agents': self.data.get('agents', {}), - 'current_step': self.current_step, - 'projection': self.data['projection'], - 'assets': self.assets - } - - def get_perspective_projection(self, agent_info): - """ - """ - from math import sqrt - from micropsi_core.world.minecraft import structs - - # specs - focal_length = 1 # distance of image plane from projective point - max_dist = 150 # maximum distance for raytracing - resolution = 4 # camera resolution for a specific visual field - im_width = 32 # width of projection /image plane - im_height = 16 # height of projection /image plane - cam_width = 1. # width of viewport /camera coords - cam_height = 1. # height of viewport /camera coords - - # save parameters for frontend - self.assets['width'] = im_width * resolution - self.assets['height'] = im_height * resolution - - # get agent's position, yaw, and pitch - position = (int(agent_info['x']), int(agent_info['y']), int(agent_info['z'])) - yaw = 360 - float(agent_info['yaw']) % 360 # given in degrees - # check which yaw value is straight forward, potentially it's 90, ie. mc yaw + 90 - pitch = float(agent_info['pitch']) # given in degrees - - # "Yaw is measured in degrees, and does not follow classical trigonometry rules. The unit circle of yaw on - # the XZ-plane starts at (0, 1) and turns counterclockwise, with 90 at (-1, 0), 180 at (0,-1) and 270 at - # (1, 0). Additionally, yaw is not clamped to between 0 and 360 degrees; any number is valid, including - # negative numbers and numbers greater than 360." - - # "Pitch is measured in degrees, where 0 is looking straight ahead, - # -90 is looking straight up, and 90 is looking straight down. 
" - - # perspective of particular yaw values - # 0 - - # 90 - - # 180 - - # 270 - - - # perspective of particular pitch values - # 0 - straight ahead - # 90 - straight down - # 180 - upside down straight backwards - # 270 - straight up - - # span viewport - tick_w = cam_width / im_width / resolution - tick_h = cam_height / im_height / resolution - # the horizontal plane is split half-half, the vertical plane is shifted upwards wrt the agent's position - h_line = [i for i in self.frange(position[0] - 0.5 * cam_width, position[0] + 0.5 * cam_width, tick_w)] - v_line = [i for i in self.frange(position[1] - 0.05 * cam_height, position[1] + 0.95 * cam_height, tick_h)] - - # compute pixel values of image plane - projection = tuple() - - x0, y0, z0 = position # agent's position aka projective point - zi = z0 + focal_length - - for xi in reversed(h_line): - for yi in reversed(v_line): - - distance = 0 # just a counter - block_type = 0 - xb, yb, zb = xi, yi, zi - - # compute difference vector between projective point and image point - diff = (xi - x0, yi - y0, zi - z0) - - # normalize difference vector - magnitude = sqrt(diff[0] ** 2 + diff[1] ** 2 + diff[2] ** 2) - if magnitude == 0.: - magnitude = 1. 
- norm = (diff[0] / magnitude, diff[1] / magnitude, diff[2] / magnitude) - - # rotate norm vector - norm = self.rotate_around_x_axis(norm, pitch) - norm = self.rotate_around_y_axis(norm, yaw) - - # rotate diff vector - diff = self.rotate_around_x_axis(diff, pitch) - diff = self.rotate_around_y_axis(diff, yaw) - - # add diff to projection point aka agent's position - xb, yb, zb = x0 + diff[0], y0 + diff[1], z0 + diff[2] - - while block_type <= 0: # which is air - - # check block type of next distance point along ray - # aka add normalized difference vector to image point - xb = xb + norm[0] - yb = yb + norm[1] - zb = zb + norm[2] - - block_type = self.spockplugin.get_block_type( - int(xb), - int(yb), - int(zb), - ) - - distance += 1 - if distance >= max_dist: - break - - # add block name, distance to projection plane - # hm, if block_type unknown, expect an exception - if structs.block_names.get(str(block_type)): - block_name = structs.block_names[str(block_type)] - projection += (block_name, distance) - - self.data['projection'] = projection - - def rotate_around_x_axis(self, pos, angle): - """ Rotate a 3D point around the x-axis given a specific angle. """ - from math import radians, cos, sin - - # convert angle in degrees to radians - theta = radians(angle) - - # rotate vector - x = pos[0] - y = pos[1] * cos(theta) - pos[2] * sin(theta) - z = pos[1] * sin(theta) + pos[2] * cos(theta) - - return (x, y, z) - - def rotate_around_y_axis(self, pos, angle): - """ Rotate a 3D point around the y-axis given a specific angle. """ - from math import radians, cos, sin - - # convert angle in degrees to radians - theta = radians(angle) - - # rotate vector - x = pos[0] * cos(theta) + pos[2] * sin(theta) - y = pos[1] - z = - pos[0] * sin(theta) + pos[2] * cos(theta) - - return (x, y, z) - - def rotate_around_z_axis(self, pos, angle): - """ Rotate a 3D point around the z-axis given a specific angle. 
""" - from math import radians, cos, sin - - # convert angle in degrees to radians - theta = radians(angle) - - # rotate vector - x = pos[0] * cos(theta) - pos[1] * sin(theta) - y = pos[0] * sin(theta) + pos[1] * cos(theta) - z = pos[2] - - return (x, y, z) - - def frange(self, start, end, step): - """ - Range for floats. - """ - while start < end: - yield start - start += step - - -class MinecraftWorldAdapter(WorldAdapter): - """ - World adapter for a basic Minecraft agent that receives its xyz position and - the ground type of the block it is standing on as sensory input, and randomly - moves into one of the four cardinal directions ( until it dies ). - """ - - spawn_position = { - 'x': -105, - 'y': 63, - 'z': 59, - } - - def __init__(self, world, uid=None, **data): - world.spockplugin.clientinfo.spawn_position = self.spawn_position - WorldAdapter.__init__(self, world, uid=uid, **data) - self.datasources = dict((i, 0) for i in ['x', 'y', 'z', 'yaw', 'pitch', 'groundtype']) - self.datatargets = dict((i, 0) for i in ['go_north', 'go_east', 'go_west', 'go_south', 'yaw', 'pitch']) - - - def initialize_worldobject(self, data): - - self.datasources['x'] = self.world.spockplugin.clientinfo.position['x'] - self.datasources['y'] = self.world.spockplugin.clientinfo.position['y'] - self.datasources['z'] = self.world.spockplugin.clientinfo.position['z'] - self.datasources['yaw'] = self.world.spockplugin.clientinfo.position['yaw'] - self.datasources['pitch'] = self.world.spockplugin.clientinfo.position['pitch'] - self.datasources['groundtype'] = self.get_groundtype() - - def update_data_sources_and_targets(self): - """ Advances the agent's life on every cycle of the world calculation. 
""" - import random - - # translate data targets - self.position = (self.datasources['x'], self.datasources['y'], self.datasources['z']) - - movement = self.translate_datatargets_to_xz() - - # note: movement info is sent regardless of change - self.world.spockplugin.dispatchMovement(movement[0], movement[1]) - - position = self.world.spockplugin.clientinfo.position - amp = random.choice([-4, -3, 2, 3, 4]) - position['yaw'] = (position['yaw'] + amp * self.datatargets['yaw']) % 360 - # not used yet but data target gets activation every once in a random while - # position['pitch'] = (position['pitch'] + self.datatargets['pitch']) - # position['pitch'] = 0 - self.world.spockplugin.move(position=position) - - # get new datasources - self.datasources['x'] = self.world.spockplugin.clientinfo.position['x'] - self.datasources['y'] = self.world.spockplugin.clientinfo.position['y'] - self.datasources['z'] = self.world.spockplugin.clientinfo.position['z'] - self.datasources['yaw'] = self.world.spockplugin.clientinfo.position['yaw'] - self.datasources['pitch'] = self.world.spockplugin.clientinfo.position['pitch'] - self.datasources['groundtype'] = self.get_groundtype() - - def translate_datatargets_to_xz(self): - """ Translates movements in cardinal directions to x,z coordinates. """ - - # Reminder: x increases East, decreases West, - # z increases South, decreases North - x, z = 0., 0. - if self.datatargets['go_north'] > 0: - z = -1. - elif self.datatargets['go_east'] > 0: - x = 1. - elif self.datatargets['go_south'] > 0: - z = 1. - elif self.datatargets['go_west'] > 0: - x = -1. 
- return (x, z) - - def get_groundtype(self): - """ - """ - try: - groundtype = self.world.spockplugin.get_block_type( - int(self.datasources['x']), - int(self.datasources['y'] - 1), - int(self.datasources['z'])) - - except AttributeError: - groundtype = None - - return groundtype - - -class MinecraftBraitenberg(WorldAdapter): - - def __init__(self, world, uid=None, **data): - super().__init__(world, uid, **data) - self.datasources = { - 'diamond_offset_x': 0, - 'diamond_offset_z': 0, - 'grd_stone': 0, - 'grd_dirt': 0, - 'grd_wood': 0, - 'grd_coal': 0, - 'obstcl_x+': 0, - 'obstcl_x-': 0, - 'obstcl_z+': 0, - 'obstcl_z-': 0 - } - self.datatargets = { - 'move_x': 0, - 'move_z': 0 - } - - def update_data_sources_and_targets(self): - """called on every world calculation step to advance the life of the agent""" - # find diamond - bot_x = self.world.spockplugin.clientinfo.position['x'] - bot_y = self.world.spockplugin.clientinfo.position['y'] - bot_z = self.world.spockplugin.clientinfo.position['z'] - bot_coords = (bot_x, bot_y, bot_z) - x_chunk = bot_x // 16 - z_chunk = bot_z // 16 - - current_column = self.world.spockplugin.world.columns[(x_chunk, z_chunk)] - current_section = current_column.chunks[int((bot_y - 1) // 16)] - - self.detect_groundtypes(bot_coords, current_section) - self.detect_diamond(current_column, bot_coords, x_chunk, z_chunk) - self.detect_obstacles(bot_coords, current_section) - - move_x = self.datatargets['move_x'] - move_z = self.datatargets['move_z'] - self.world.spockplugin.psi_dispatcher.dispatchPsiCommands(bot_coords, current_section, move_x, move_z) - - def detect_diamond(self, current_column, bot_coords, x_chunk, z_chunk): - for y in range(0, 16): - current_section = current_column.chunks[int((bot_coords[1] + y - 10 // 2) // 16)] # TODO explain formula - if current_section is not None: - for x in range(0, 16): - for z in range(0, 16): - # TODO explain formula - current_block = current_section.get(x, int((bot_coords[1] + y - 10 // 2) % 16), 
z).id - if current_block == 56: - diamond_coords = (x + x_chunk * 16, y, z + z_chunk * 16) - self.datasources['diamond_offset_x'] = bot_coords[0] - diamond_coords[0] - self.datasources['diamond_offset_z'] = bot_coords[2] - diamond_coords[2] - - def detect_groundtypes(self, bot_coords, current_section): - block_below = current_section.get( - int(bot_coords[0]) % 16, - int((bot_coords[1] - 1) % 16), - int(bot_coords[2]) % 16).id - self.datasources['grd_dirt'] = 1 if (block_below == 2) else 0 - self.datasources['grd_stone'] = 1 if (block_below == 1) else 0 - self.datasources['grd_wood'] = 1 if (block_below == 17) else 0 - self.datasources['grd_coal'] = 1 if (block_below == 173) else 0 - - def detect_obstacles(self, bot_coords, current_section): - self.datasources['obstcl_x+'] = \ - 1 if current_section.get( - int(bot_coords[0] + 1) % 16, - int((bot_coords[1] + 1) % 16), - int(bot_coords[2]) % 16).id != 0 \ - else 0 - self.datasources['obstcl_x-'] = \ - 1 if current_section.get( - int(bot_coords[0] - 1) % 16, - int((bot_coords[1] + 1) % 16), - int(bot_coords[2]) % 16).id != 0 \ - else 0 - self.datasources['obstcl_z+'] = \ - 1 if current_section.get( - int(bot_coords[0]) % 16, - int((bot_coords[1] + 1) % 16), - int(bot_coords[2] + 1) % 16).id != 0 \ - else 0 - self.datasources['obstcl_z-'] = \ - 1 if current_section.get( - int(bot_coords[0]) % 16, - int((bot_coords[1] + 1) % 16), - int(bot_coords[2] - 1) % 16).id != 0 \ - else 0 diff --git a/micropsi_core/world/minecraft/minecraft_graph_locomotion.py b/micropsi_core/world/minecraft/minecraft_graph_locomotion.py deleted file mode 100644 index 75b6faa8..00000000 --- a/micropsi_core/world/minecraft/minecraft_graph_locomotion.py +++ /dev/null @@ -1,408 +0,0 @@ -from micropsi_core.world.worldadapter import WorldAdapter -from micropsi_core import tools -import random -import logging -import time -from functools import partial -from spock.mcp.mcpacket import Packet - - -class MinecraftGraphLocomotion(WorldAdapter): - - 
loco_node_template = { - 'uid': "", - 'name': "", - 'x': 0, - 'y': 0, - 'z': 0, - 'exit_one_uid': None, - 'exit_two_uid': None, - 'exit_three_uid': None, - } - - loco_nodes = {} - - home_uid = tools.generate_uid() - underground_garden_uid = tools.generate_uid() - village_uid = tools.generate_uid() - cathedral_uid = tools.generate_uid() - summit_uid = tools.generate_uid() - cloud_uid = tools.generate_uid() - bungalow_uid = tools.generate_uid() - farm_uid = tools.generate_uid() - forest_uid = tools.generate_uid() - desert_outpost_uid = tools.generate_uid() - swamp_uid = tools.generate_uid() - - loco_nodes_indexes = [None, 'home', 'underground garden', 'village', 'cathedral', 'summit', 'cloud', 'bungalow', 'farm', 'forest', 'desert outpost', 'swamp'] - - loco_nodes[home_uid] = loco_node_template.copy() - loco_nodes[home_uid]['name'] = "home" - loco_nodes[home_uid]['uid'] = home_uid - loco_nodes[home_uid]['x'] = -105 - loco_nodes[home_uid]['y'] = 63 - loco_nodes[home_uid]['z'] = 59 - loco_nodes[home_uid]['exit_one_uid'] = cloud_uid - loco_nodes[home_uid]['exit_two_uid'] = cathedral_uid - loco_nodes[home_uid]['exit_three_uid'] = village_uid - - loco_nodes[underground_garden_uid] = loco_node_template.copy() - loco_nodes[underground_garden_uid]['name'] = "underground garden" - loco_nodes[underground_garden_uid]['uid'] = underground_garden_uid - loco_nodes[underground_garden_uid]['x'] = -264 - loco_nodes[underground_garden_uid]['y'] = 62 - loco_nodes[underground_garden_uid]['z'] = 65 - loco_nodes[underground_garden_uid]['exit_one_uid'] = home_uid - loco_nodes[underground_garden_uid]['exit_two_uid'] = village_uid - - loco_nodes[village_uid] = loco_node_template.copy() - loco_nodes[village_uid]['name'] = "village" - loco_nodes[village_uid]['uid'] = village_uid - loco_nodes[village_uid]['x'] = -293 - loco_nodes[village_uid]['y'] = 64 - loco_nodes[village_uid]['z'] = -220 - loco_nodes[village_uid]['exit_one_uid'] = underground_garden_uid - 
loco_nodes[village_uid]['exit_two_uid'] = home_uid - - loco_nodes[cathedral_uid] = loco_node_template.copy() - loco_nodes[cathedral_uid]['name'] = "cathedral" - loco_nodes[cathedral_uid]['uid'] = cathedral_uid - loco_nodes[cathedral_uid]['x'] = -100 - loco_nodes[cathedral_uid]['y'] = 63 - loco_nodes[cathedral_uid]['z'] = 282 - loco_nodes[cathedral_uid]['exit_one_uid'] = home_uid - loco_nodes[cathedral_uid]['exit_two_uid'] = cloud_uid - loco_nodes[cathedral_uid]['exit_three_uid'] = bungalow_uid - - loco_nodes[summit_uid] = loco_node_template.copy() - loco_nodes[summit_uid]['name'] = "summit" - loco_nodes[summit_uid]['uid'] = summit_uid - loco_nodes[summit_uid]['x'] = -233 - loco_nodes[summit_uid]['y'] = 102 - loco_nodes[summit_uid]['z'] = 307 - loco_nodes[summit_uid]['exit_one_uid'] = swamp_uid - - loco_nodes[cloud_uid] = loco_node_template.copy() - loco_nodes[cloud_uid]['name'] = "cloud" - loco_nodes[cloud_uid]['uid'] = cloud_uid - loco_nodes[cloud_uid]['x'] = -98 - loco_nodes[cloud_uid]['y'] = 63 - loco_nodes[cloud_uid]['z'] = 198 - loco_nodes[cloud_uid]['exit_one_uid'] = home_uid - loco_nodes[cloud_uid]['exit_two_uid'] = cathedral_uid - - loco_nodes[bungalow_uid] = loco_node_template.copy() - loco_nodes[bungalow_uid]['name'] = "bungalow" - loco_nodes[bungalow_uid]['uid'] = bungalow_uid - loco_nodes[bungalow_uid]['x'] = 28 - loco_nodes[bungalow_uid]['y'] = 63 - loco_nodes[bungalow_uid]['z'] = 292 - loco_nodes[bungalow_uid]['exit_one_uid'] = cathedral_uid - loco_nodes[bungalow_uid]['exit_two_uid'] = farm_uid - - loco_nodes[farm_uid] = loco_node_template.copy() - loco_nodes[farm_uid]['name'] = "farm" - loco_nodes[farm_uid]['uid'] = farm_uid - loco_nodes[farm_uid]['x'] = -50 - loco_nodes[farm_uid]['y'] = 64 - loco_nodes[farm_uid]['z'] = 410 - loco_nodes[farm_uid]['exit_one_uid'] = bungalow_uid - loco_nodes[farm_uid]['exit_two_uid'] = cathedral_uid - loco_nodes[farm_uid]['exit_three_uid'] = forest_uid - - loco_nodes[forest_uid] = loco_node_template.copy() - 
loco_nodes[forest_uid]['name'] = "forest" - loco_nodes[forest_uid]['uid'] = forest_uid - loco_nodes[forest_uid]['x'] = -273 - loco_nodes[forest_uid]['y'] = 65 - loco_nodes[forest_uid]['z'] = 782 - loco_nodes[forest_uid]['exit_one_uid'] = farm_uid - loco_nodes[forest_uid]['exit_two_uid'] = desert_outpost_uid - loco_nodes[forest_uid]['exit_three_uid'] = swamp_uid - - loco_nodes[desert_outpost_uid] = loco_node_template.copy() - loco_nodes[desert_outpost_uid]['name'] = "desert outpost" - loco_nodes[desert_outpost_uid]['uid'] = desert_outpost_uid - loco_nodes[desert_outpost_uid]['x'] = -243 - loco_nodes[desert_outpost_uid]['y'] = 64 - loco_nodes[desert_outpost_uid]['z'] = 958 - loco_nodes[desert_outpost_uid]['exit_one_uid'] = forest_uid - - loco_nodes[swamp_uid] = loco_node_template.copy() - loco_nodes[swamp_uid]['name'] = "swamp" - loco_nodes[swamp_uid]['uid'] = swamp_uid - loco_nodes[swamp_uid]['x'] = -529 - loco_nodes[swamp_uid]['y'] = 63 - loco_nodes[swamp_uid]['z'] = 504 - loco_nodes[swamp_uid]['exit_one_uid'] = forest_uid - loco_nodes[swamp_uid]['exit_two_uid'] = summit_uid - - tp_tolerance = 5 - - action_timeout = 10 - - actions = ['eat', 'sleep', 'take_exit_one', 'take_exit_two', 'take_exit_three'] - - logger = None - - def __init__(self, world, uid=None, **data): - super().__init__(world, uid, **data) - - self.datasources = { - 'health': 1, - 'food': 1, - 'temperature': 0.5, - 'food_supply': 0, - 'fatigue': 0, - 'awake': 1, - 'current_location_index': 0 - } - - targets = ['take_exit_one', 'take_exit_two', 'take_exit_three', 'pitch', 'yaw', 'eat', 'sleep'] - self.datatarget_history = {} - for t in targets: - self.datatargets[t] = 0 - self.datatarget_feedback[t] = 0 - self.datatarget_history[t] = 0 - - # a collection of conditions to check on every update(..), eg., for action feedback - self.waiting_list = [] - - self.target_loco_node_uid = None - - self.current_loco_node = None - - self.last_slept = 0 - self.sleeping = False - - self.spockplugin = 
self.world.spockplugin - self.spockplugin.worldadapter = self - self.waiting_for_spock = True - self.logger = logging.getLogger("agent.%s" % self.uid) - self.spockplugin.event.reg_event_handler('PLAY= 1 and not self.datatarget_history['take_exit_one'] >= 1: - # if the current node on the transition graph has the selected exit - if self.current_loco_node['exit_one_uid'] is not None: - self.register_action( - 'take_exit_one', - partial(self.locomote, self.current_loco_node['exit_one_uid']), - partial(self.check_movement_feedback, self.current_loco_node['exit_one_uid']) - ) - else: - self.datatarget_feedback['take_exit_one'] = -1. - - if self.datatargets['take_exit_two'] >= 1 and not self.datatarget_history['take_exit_two'] >= 1: - if self.current_loco_node['exit_two_uid'] is not None: - self.register_action( - 'take_exit_two', - partial(self.locomote, self.current_loco_node['exit_two_uid']), - partial(self.check_movement_feedback, self.current_loco_node['exit_two_uid']) - ) - else: - self.datatarget_feedback['take_exit_two'] = -1. - - if self.datatargets['take_exit_three'] >= 1 and not self.datatarget_history['take_exit_three'] >=1: - if self.current_loco_node['exit_three_uid'] is not None: - self.register_action( - 'take_exit_three', - partial(self.locomote, self.current_loco_node['exit_three_uid']), - partial(self.check_movement_feedback, self.current_loco_node['exit_three_uid']) - ) - else: - self.datatarget_feedback['take_exit_three'] = -1. - - if self.datatargets['eat'] >= 1 and not self.datatarget_history['eat'] >= 1: - if self.has_bread() and self.datasources['food'] < 1: - self.register_action( - 'eat', - self.spockplugin.eat, - partial(self.check_eat_feedback, self.spockplugin.clientinfo.health['food']) - ) - else: - self.datatarget_feedback['eat'] = -1. 
- - if self.datatargets['sleep'] >= 1 and not self.datatarget_history['sleep'] >= 1: - if self.check_movement_feedback(self.home_uid) and self.spockplugin.world.time_of_day % 24000 > 12500: - # we're home and it's night, so we can sleep now: - self.register_action('sleep', self.sleep, self.check_waking_up) - else: - self.datatarget_feedback['sleep'] = -1. - - # update datatarget history - for k in self.datatarget_history.keys(): - self.datatarget_history[k] = self.datatargets[k] - - def locomote(self, target_loco_node_uid): - new_loco_node = self.loco_nodes[target_loco_node_uid] - - self.logger.debug('locomoting to %s' % new_loco_node['name']) - - self.spockplugin.chat("/tppos {0} {1} {2}".format( - new_loco_node['x'], - new_loco_node['y'], - new_loco_node['z'])) - - self.target_loco_node_uid = target_loco_node_uid - - self.current_loco_node = new_loco_node - - def check_for_action_feedback(self): - """ """ - # check if any pending datatarget_feedback can be confirmed with data from the world - if self.waiting_list: - new_waiting_list = [] - for index, item in enumerate(self.waiting_list): - if item['validation'](): - if self.datatargets[item['datatarget']] != 0: - self.datatarget_feedback[item['datatarget']] = 1. - else: - new_waiting_list.append(item) - - self.waiting_list = new_waiting_list - - def register_action(self, datatarget, action_function, validation_function): - """ registers an action to be performed by the agent. 
Will wait, and eventually re-trigger the action - until the validation function returns true, signalling success of the action""" - self.waiting_list.append({ - 'datatarget': datatarget, - 'action': action_function, - 'validation': validation_function, - 'time': time.clock() - }) - action_function() - - def has_bread(self): - for idx, item in enumerate(self.spockplugin.quickslots): - if item.get('id', 0) == 297: - self.spockplugin.change_held_item(idx) - return True - self.logger.debug('Agent has no bread!') - return False - - def check_eat_feedback(self, old_value): - food = self.spockplugin.clientinfo.health['food'] - return food > old_value or food == 20 - - def check_movement_feedback(self, target_loco_node): - if abs(self.loco_nodes[target_loco_node]['x'] - int(self.spockplugin.clientinfo.position['x'])) <= self.tp_tolerance \ - and abs(self.loco_nodes[target_loco_node]['y'] - int(self.spockplugin.clientinfo.position['y'])) <= self.tp_tolerance \ - and abs(self.loco_nodes[target_loco_node]['z'] - int(self.spockplugin.clientinfo.position['z'])) <= self.tp_tolerance: - # hand the agent a bread, if it just arrived at the farm, or at the village - if target_loco_node == self.village_uid or target_loco_node == self.farm_uid: - self.spockplugin.give_item('bread') - return True - return False - - def check_waking_up(self): - """ Checks whether we're done sleeping. 
- Sets the datatarget_feedback to 1 and returns True if so, False otherwise""" - if not self.sleeping: - self.datatarget_feedback['sleep'] = 1 - return True - return False - - def sleep(self): - """ Attempts to use the bed located at -103/63/59""" - self.logger.debug('going to sleep') - data = { - 'location': { - 'x': -103, - 'y': 63, - 'z': 59 - }, - 'direction': 1, - 'held_item': { - 'id': 297, - 'amount': 0, - 'damage': 0 - }, - 'cur_pos_x': -103, - 'cur_pos_y': 63, - 'cur_pos_z': 59 - } - self.spockplugin.net.push(Packet(ident='PLAY>Player Block Placement', data=data)) diff --git a/micropsi_core/world/minecraft/minecraft_histogram_vision.py b/micropsi_core/world/minecraft/minecraft_histogram_vision.py deleted file mode 100644 index 0d110739..00000000 --- a/micropsi_core/world/minecraft/minecraft_histogram_vision.py +++ /dev/null @@ -1,264 +0,0 @@ -import random -from configuration import config as cfg -from .minecraft_graph_locomotion import MinecraftGraphLocomotion -from .minecraft_projection_mixin import MinecraftProjectionMixin - - -class MinecraftHistogramVision(MinecraftGraphLocomotion, MinecraftProjectionMixin): - - # specs for vision /fovea - # focal length larger 0 means zoom in, smaller 0 means zoom out - # ( small values of focal length distort the image if things are close ) - # image proportions define the part of the world that can be viewed - # patch dimensions define the size of the sampled patch that's stored to file - focal_length = 0.5 # distance of image plane from projective point /fovea - max_dist = 64 # maximum distance for raytracing - resolution_w = 1.0 # number of rays per tick in viewport /camera coordinate system - resolution_h = 1.0 # number of rays per tick in viewport /camera coordinate system - im_width = 128 # width of projection /image plane in the world - im_height = 64 # height of projection /image plane in the world - cam_width = 1. # width of normalized device /camera /viewport - cam_height = 1. 
# height of normalized device /camera /viewport - # Note: adapt patch width to be smaller than or equal to resolution x image dimension - patch_width = 32 # width of a fovea patch # 128 || 32 - patch_height = 32 # height of a patch # 64 || 32 - num_fov = 6 # the root number of fov__ sensors, ie. there are num_fov x num_fov fov__ sensors - num_steps_to_keep_vision_stable = 3 - - # Note: actors fov_x, fov_y and the saccader's gates fov_x, fov_y ought to be parametrized [0.,2.] w/ threshold 1. - # -- 0. means inactivity, values between 1. and 2. are the scaled down movement in x/y direction on the image plane - - def __init__(self, world, uid=None, **data): - super().__init__(world, uid, **data) - self.datasources.update({ - 'fov_x': 0, # fovea sensors receive their input from the fovea actors - 'fov_y': 0, - 'fov_hist__-01': 0, # these names must be the most commonly observed block types - 'fov_hist__000': 0, - 'fov_hist__001': 0, - 'fov_hist__002': 0, - 'fov_hist__003': 0, - 'fov_hist__004': 0, - 'fov_hist__009': 0, - 'fov_hist__012': 0, - 'fov_hist__017': 0, - 'fov_hist__018': 0, - 'fov_hist__020': 0, - 'fov_hist__026': 0, - 'fov_hist__031': 0, - 'fov_hist__064': 0, - 'fov_hist__106': 0, - }) - - targets = { - 'orientation': 0, - 'fov_x': 0, - 'fov_y': 0 - } - - self.datatargets.update(targets) - self.datatarget_feedback.update(targets) - - # add datasources for fovea - for i in range(self.num_fov): - for j in range(self.num_fov): - name = "fov__%02d_%02d" % (i, j) - self.datasources[name] = 0. 
- - self.simulated_vision = False - if 'simulate_vision' in cfg['minecraft']: - self.simulated_vision = True - self.simulated_vision_datafile = cfg['minecraft']['simulate_vision'] - self.logger.info("Setting up minecraft_graph_locomotor to simulate vision from data file %s", self.simulated_vision_datafile) - - import os - import csv - self.simulated_vision_data = None - self.simulated_vision_datareader = csv.reader(open(self.simulated_vision_datafile)) - if os.path.getsize(self.simulated_vision_datafile) < (500 * 1024 * 1024): - self.simulated_vision_data = [[float(datapoint) for datapoint in sample] for sample in self.simulated_vision_datareader] - self.simulated_data_entry_index = 0 - self.simulated_data_entry_max = len(self.simulated_vision_data) - 1 - - if 'record_vision' in cfg['minecraft']: - self.record_file = open(cfg['minecraft']['record_vision'], 'a') - - def update_data_sources_and_targets(self): - """called on every world calculation step to advance the life of the agent""" - - if self.waiting_for_spock: - super().update_data_sources_and_targets() - - else: - if self.simulated_vision: - self.simulate_visual_input() - else: - super().update_data_sources_and_targets() - - # change pitch and yaw every x world steps to increase sensory variation - # < ensures some stability to enable learning in the autoencoder - if self.world.current_step % self.num_steps_to_keep_vision_stable == 0: - # for patches pitch = 10 and yaw = random.randint(-10,10) were used - # for visual field pitch = randint(0, 30) and yaw = randint(1, 360) were used - self.spockplugin.clientinfo.position['pitch'] = 10 - self.spockplugin.clientinfo.position['yaw'] = random.randint(-10, 10) - self.datatargets['pitch'] = self.spockplugin.clientinfo.position['pitch'] - self.datatargets['yaw'] = self.spockplugin.clientinfo.position['yaw'] - # Note: datatargets carry spikes not continuous signals, ie. 
pitch & yaw will be 0 in the next step - self.datatarget_feedback['pitch'] = 1.0 - self.datatarget_feedback['yaw'] = 1.0 - - # - orientation = self.datatargets['orientation'] # x_axis + 360 / orientation degrees - self.datatarget_feedback['orientation'] = 1.0 - # self.datatargets['orientation'] = 0 - - # sample all the time - # update fovea sensors, get sensory input, provide action feedback - # make sure fovea datasources don't go below 0. - self.datasources['fov_x'] = self.datatargets['fov_x'] - 1. if self.datatargets['fov_x'] > 0. else 0. - self.datasources['fov_y'] = self.datatargets['fov_y'] - 1. if self.datatargets['fov_y'] > 0. else 0. - loco_label = self.current_loco_node['name'] # because python uses call-by-object - self.get_visual_input(self.datasources['fov_x'], self.datasources['fov_y'], loco_label) - - # Note: saccading can't fail because fov_x, fov_y are internal actors, hence we return immediate feedback - if self.datatargets['fov_x'] > 0.0: - self.datatarget_feedback['fov_x'] = 1.0 - if self.datatargets['fov_y'] > 0.0: - self.datatarget_feedback['fov_y'] = 1.0 - - def get_visual_input(self, fov_x, fov_y, label): - """ - Spans an image plane. - - Note that the image plane is walked left to right, top to bottom ( before rotation )! - This means that fov__00_00 gets the top left pixel, fov__15_15 gets the bottom right pixel. 
- """ - # set agent position - pos_x = self.spockplugin.clientinfo.position['x'] - pos_y = self.spockplugin.clientinfo.position['y'] + 0.620 # add some stance to y pos ( which is ground + 1 ) - pos_z = self.spockplugin.clientinfo.position['z'] - - # set yaw and pitch ( in degrees ) - yaw = self.spockplugin.clientinfo.position['yaw'] - # consider setting yaw to a random value between 0 and 359 - pitch = self.spockplugin.clientinfo.position['pitch'] - - # compute ticks per dimension - tick_w = self.cam_width / self.im_width / self.resolution_w - tick_h = self.cam_height / self.im_height / self.resolution_h - - # span image plane - # the horizontal plane is split half-half, the vertical plane is shifted upwards - h_line = [i for i in self.frange(pos_x - 0.5 * self.cam_width, pos_x + 0.5 * self.cam_width, tick_w)] - v_line = [i for i in self.frange(pos_y - 0.05 * self.cam_height, pos_y + 0.95 * self.cam_height, tick_h)] - - # scale up fov_x, fov_y - fov_x = round(fov_x * (self.im_width * self.resolution_w - self.patch_width)) - fov_y = round(fov_y * (self.im_height * self.resolution_h - self.patch_height)) - - x0, y0, z0 = pos_x, pos_y, pos_z # agent's position aka projective point - zi = z0 + self.focal_length - - v_line.reverse() - - # compute block type values for the whole patch /fovea - patch = [] - for i in range(self.patch_height): - for j in range(self.patch_width): - try: - block_type, distance = self.project(h_line[fov_x + j], v_line[fov_y + i], zi, x0, y0, z0, yaw, pitch) - except IndexError: - block_type, distance = -1, -1 - self.logger.warning("IndexError at (%d,%d)" % (fov_x + j, fov_y + i)) - patch.append(block_type) - - # write block type histogram values to self.datasources['fov_hist__*'] - # for every block type seen in patch, if there's a datasource for it, fill it with its normalized frequency - normalizer = self.patch_width * self.patch_height - # reset fov_hist sensors, then fill them with new values - for k in self.datasources.keys(): - if 
k.startswith('fov_hist__'): - self.datasources[k] = 0. - for bt in set(patch): - name = "fov_hist__%03d" % bt - if name in self.datasources: - self.datasources[name] = patch.count(bt) / normalizer - - # COMPUTE VALUES FOR fov__%02d_%02d SENSORS - # if all values in the patch are the same, write zeros - if patch[1:] == patch[:-1]: - - zero_patch = True - patch_resc = [0.0] * self.patch_width * self.patch_height - - else: - - zero_patch = False - # convert block types into binary values: map air and emptiness to black (0), everything else to white (1) - patch_ = [0.0 if v <= 0 else 1.0 for v in patch] - - # normalize block type values - # subtract the sample mean from each of its pixels - mean = float(sum(patch_)) / len(patch_) - patch_avg = [x - mean for x in patch_] # TODO: throws error in ipython - why not here !? - - # truncate to +/- 3 standard deviations and scale to -1 and +1 - - var = [x ** 2.0 for x in patch_avg] - std = (sum(var) / len(var)) ** 0.5 # ASSUMPTION: all values of x are equally likely - pstd = 3.0 * std - # if block types are all the same number, eg. 
-1, std will be 0, therefore - if pstd == 0.0: - patch_std = [0.0 for x in patch_avg] - else: - patch_std = [max(min(x, pstd), -pstd) / pstd for x in patch_avg] - - # scale from [-1,+1] to [0.1,0.9] and write values to sensors - patch_resc = [(1.0 + x) * 0.4 + 0.1 for x in patch_std] - - self.write_visual_input_to_datasources(patch_resc, self.patch_width, self.patch_height) - - if 'record_vision' in cfg['minecraft']: - # do *not* record homogeneous and replayed patches - if not zero_patch and not self.simulated_vision: - if label == self.current_loco_node['name']: - data = "{0}".format(",".join(str(b) for b in patch)) - self.record_file.write("%s,%s,%d,%d,%d,%d\n" % (data, label, pitch, yaw, fov_x, fov_y)) - else: - self.logger.warn('potentially corrupt data were ignored') - - def simulate_visual_input(self): - """ - Every steps read the next line - from the vision file and fill its values into fov__*_* datasources. - """ - if self.world.current_step % self.num_steps_to_keep_vision_stable == 0: - line = None - if self.simulated_vision_data is None: - line = next(self.simulated_vision_datareader, None) - if line is None: - self.logger.info("Simulating vision from data file, starting over...") - import csv - self.simulated_vision_datareader = csv.reader(open(self.simulated_vision_datafile)) - line = next(self.simulated_vision_datareader) - line = [float(entry) for entry in line] - else: - self.simulated_data_entry_index += 1 - if self.simulated_data_entry_index > self.simulated_data_entry_max: - self.logger.info("Simulating vision from memory, starting over, %s entries.", self.simulated_data_entry_max + 1) - self.simulated_data_entry_index = 0 - line = self.simulated_vision_data[self.simulated_data_entry_index] - self.write_visual_input_to_datasources(line, self.num_fov, self.num_fov) - - def write_visual_input_to_datasources(self, patch, patch_width, patch_height): - """ - Write a patch of the size self.num_fov times self.num_fov to self.datasourcesp['fov__*_*']. 
- If num_fov is less than patch height and width, chose the horizontally centered , vertically 3/4 lower patch. - """ - left_margin = max(0, (int(patch_width - self.num_fov) // 2) - 1) - top_margin = max(0, (int(patch_height - self.num_fov) // 4 * 3) - 1) - for i in range(self.num_fov): - for j in range(self.num_fov): - name = 'fov__%02d_%02d' % (i, j) - self.datasources[name] = patch[(patch_height * (i + top_margin)) + j + left_margin] diff --git a/micropsi_core/world/minecraft/minecraft_projection_mixin.py b/micropsi_core/world/minecraft/minecraft_projection_mixin.py deleted file mode 100644 index 6bd14d6f..00000000 --- a/micropsi_core/world/minecraft/minecraft_projection_mixin.py +++ /dev/null @@ -1,97 +0,0 @@ -from math import sqrt, radians, cos, sin - - -class MinecraftProjectionMixin(object): - - def project(self, xi, yi, zi, x0, y0, z0, yaw, pitch): - """ - Given a point on the projection plane and the agent's position, cast a - ray to find the nearest block type that isn't air and its distance from - the projective plane. - """ - distance = 0 # just a counter - block_type = -1 # consider mapping nothingness to air, ie. -1 to 0 - - # compute difference vector between projective point and image point - diff = (xi - x0, yi - y0, zi - z0) - - # normalize difference vector - magnitude = sqrt(diff[0] ** 2 + diff[1] ** 2 + diff[2] ** 2) - if magnitude == 0.: - magnitude = 1. 
- norm = (diff[0] / magnitude, diff[1] / magnitude, diff[2] / magnitude) - - # rotate norm vector - norm = self.rotate_around_x_axis(norm, pitch) - norm = self.rotate_around_y_axis(norm, yaw) - - # rotate diff vector - diff = self.rotate_around_x_axis(diff, pitch) - diff = self.rotate_around_y_axis(diff, yaw) - - # add diff to projection point aka agent's position - xb, yb, zb = x0 + diff[0], y0 + diff[1], z0 + diff[2] - - while block_type <= 0: # which is air and nothingness - - # check block type of next distance point along ray - # aka add normalized difference vector to image point - # TODO: consider a more efficient way to move on the ray, eg. a log scale - xb += norm[0] - yb += norm[1] - zb += norm[2] - - block_type = self.spockplugin.get_block_type(xb, yb, zb) - - distance += 1 - if distance >= self.max_dist: - break - - return block_type, distance - - def rotate_around_x_axis(self, pos, angle): - """ Rotate a 3D point around the x-axis given a specific angle. """ - - # convert angle in degrees to radians - theta = radians(angle) - - # rotate vector - xx, y, z = pos - yy = y * cos(theta) - z * sin(theta) - zz = y * sin(theta) + z * cos(theta) - - return (xx, yy, zz) - - def rotate_around_y_axis(self, pos, angle): - """ Rotate a 3D point around the y-axis given a specific angle. """ - - # convert angle in degrees to radians - theta = radians(angle) - - # rotate vector - x, yy, z = pos - xx = x * cos(theta) + z * sin(theta) - zz = - x * sin(theta) + z * cos(theta) - - return (xx, yy, zz) - - def rotate_around_z_axis(self, pos, angle): - """ Rotate a 3D point around the z-axis given a specific angle. """ - - # convert angle in degrees to radians - theta = radians(angle) - - # rotate vector - x, y, zz = pos - xx = x * cos(theta) - y * sin(theta) - yy = x * sin(theta) + y * cos(theta) - - return (xx, yy, zz) - - def frange(self, start, end, step): - """ - Range for floats. 
- """ - while start < end: - yield start - start += step diff --git a/micropsi_core/world/minecraft/minecraft_vision.py b/micropsi_core/world/minecraft/minecraft_vision.py deleted file mode 100644 index b6052c77..00000000 --- a/micropsi_core/world/minecraft/minecraft_vision.py +++ /dev/null @@ -1,395 +0,0 @@ -from configuration import config as cfg -from .minecraft_graph_locomotion import MinecraftGraphLocomotion -from .minecraft_projection_mixin import MinecraftProjectionMixin - - -class MinecraftVision(MinecraftGraphLocomotion, MinecraftProjectionMixin): - - logger = None - - # specs for vision /fovea - # image width and height define the part of the world that can be viewed - # ie. they provide the proportions of the projection /image plane in the world - im_width = 128 - im_height = 64 - # camera values define width and height of the normalized device /camera /viewport - cam_width = 1. - cam_height = 1. - # focal length defines the distance between the image plane and the projective point /fovea - # ( focal length > 0 means zooming in, < 0 means zooming out; - # small values distort the image, in particular if objects are close ) - focal_length = 0.5 - # the maximal distance for raytracing -- the value was determined by manually trying several values - max_dist = 64 - - # Six parameters determine the agent's visual input: fov_x and fov_y, res_x and res_y, len_x and len_y. - # They describe the fovea position, the zoom level aka resolution level, and the number of receptors respectively. - # The first four variables are local, the other two are fields. Note: a rectangular receptor field is assumed. - len_x = 16 - len_y = 16 - - # tiling used for splitting visual field into sections - tiling_x = 7 - tiling_y = 3 - - # cf. 
autoencoders require similar activation ( up to noise ) for three consecutive steps - num_steps_to_keep_vision_stable = 3 - - def __init__(self, world, uid=None, **data): - - super().__init__(world, uid, **data) - - # don't use fov_act_00_00 because it complicates debug plots - self.fovea_actor = "fov_act__01_03" - - # add datasources for fovea sensors aka fov__*_* - for i in range(self.len_x): - for j in range(self.len_y): - name = "fov__%02d_%02d" % (i, j) - self.datasources[name] = 0. - - # add datasources for fovea position sensors aka fov_pos__*_* - for x in range(self.tiling_x): - for y in range(self.tiling_y): - name = "fov_pos__%02d_%02d" % (y, x) - self.datasources[name] = 0. - - # add fovea actors to datatargets, datatarget_feedback, datatarget_history, and actions - for x in range(self.tiling_x): - for y in range(self.tiling_y): - name = "fov_act__%02d_%02d" % (y, x) - self.datatargets[name] = 0. - self.datatarget_feedback[name] = 0. - self.datatarget_history[name] = 0. - self.actions.append(name) - - self.simulated_vision = False - if 'simulate_vision' in cfg['minecraft']: - self.simulated_vision = True - self.simulated_vision_datafile = cfg['minecraft']['simulate_vision'] - self.logger.info("Setting up minecraft_graph_locomotor to simulate vision from data file %s", self.simulated_vision_datafile) - - import os - import csv - self.simulated_vision_data = None - self.simulated_vision_datareader = csv.reader(open(self.simulated_vision_datafile)) - if os.path.getsize(self.simulated_vision_datafile) < (500 * 1024 * 1024): - self.simulated_vision_data = [[float(datapoint) for datapoint in sample] for sample in self.simulated_vision_datareader] - self.simulated_data_entry_index = 0 - self.simulated_data_entry_max = len(self.simulated_vision_data) - 1 - - if 'record_vision' in cfg['minecraft']: - self.record_file = open(cfg['minecraft']['record_vision'], 'a') - - self.visual_field = {} - - def update_data_sources_and_targets(self): - """called on every world 
calculation step to advance the life of the agent""" - - # first thing when spock initialization is done, determine current loco node - if self.simulated_vision: - self.simulate_visual_input(self.len_x, self.len_y) - - else: - super().update_data_sources_and_targets() - - if not self.waiting_for_spock: - # handle fovea actuators and sensors: action feedback, relay to sensors, default actuator - active_fovea_actor = None - for x in range(self.tiling_x): - for y in range(self.tiling_y): - actor_name = "fov_act__%02d_%02d" % (y, x) - sensor_name = "fov_pos__%02d_%02d" % (y, x) - # relay activation of fovea actuators to fovea sensor nodes - self.datasources[sensor_name] = self.datatargets[actor_name] - # provide action feedback for fovea actor nodes - if self.datatargets[actor_name] > 0.: - self.datatarget_feedback[actor_name] = 1. - active_fovea_actor = actor_name - - # if there's no active_fovea_actor use the last fovea position as default - if active_fovea_actor is None: - active_fovea_actor = self.fovea_actor - self.datasources[active_fovea_actor.replace("act", "pos")] = 1. - self.datatarget_feedback[active_fovea_actor] = 1. 
- - # determine if fovea position changed - fovea_position_changed = self.fovea_actor != active_fovea_actor - # store the currently active fovea actor node name for the next round - self.fovea_actor = active_fovea_actor - - # change pitch and yaw every x world steps to increase sensory variation - # < ensures some stability to enable learning in the autoencoder - if self.world.current_step % self.num_steps_to_keep_vision_stable == 0: - # for patches pitch = 10 and yaw = random.randint(-10,10) were used - # for visual field pitch = randint(0, 30) and yaw = randint(1, 360) were used - self.spockplugin.clientinfo.position['pitch'] = 10 - self.spockplugin.clientinfo.position['yaw'] = 180 # random.randint(1, 360) - self.datatargets['pitch'] = self.spockplugin.clientinfo.position['pitch'] - self.datatargets['yaw'] = self.spockplugin.clientinfo.position['yaw'] - # Note: datatargets carry spikes not continuous signals, ie. pitch & yaw will be 0 in the next step - self.datatarget_feedback['pitch'] = 1.0 - self.datatarget_feedback['yaw'] = 1.0 - - # TODO: recompute visual input only if self.world.current_step % self.num_steps_to_keep_vision_stable == 0 - # else re-write previous sensor values to datasources - - # sample all the time - loco_label = self.current_loco_node['name'] # because python uses call-by-object - # get indices of section currently viewed, i.e. 
the respective active fovea actor - y_sec, x_sec = [int(val) for val in self.fovea_actor.split('_')[-2:]] - # translate x_sec, y_sec, and z_oom to fov_x, fov_y, res_x, res_y - fov_x, fov_y, res_x, res_y = self.translate_xyz_to_vision_params(x_sec, y_sec, 1) # z_oom = 1 - self.get_visual_input(fov_x, fov_y, res_x, res_y, self.len_x, self.len_y, loco_label) - - self.collect_visual_data() - if cfg['minecraft'].get('debug_vision') and fovea_position_changed: - self.plot_visual_field() - - def locomote(self, target_loco_node_uid): - - if cfg['minecraft'].get('debug_vision') and hasattr(self, 'visual_field'): - self.visual_field = {} - - super().locomote(target_loco_node_uid) - - def translate_xyz_to_vision_params(self, x_sec, y_sec, z_oom): - """ - Visual input can be retrieved given a fovea position in terms of (fov_x, fov_y), - a resolution for each dimension (res_x, res_y), and a excerpt or patch of the - complete visual field (len_x, len_y). This world adapter offers three actors: - x_sec, y_sec, and z_oom. These need to be translated to the parameters which - determine where to compute the visual input. This translation happens here. - """ - # add a buffer to self.tiling_x/y because the rays peak out of their - # assigned image plane sections #TODO validate the magic number 2 - fov_x = (1.0 / (self.tiling_x + 2)) * x_sec - fov_y = (1.0 / (self.tiling_y + 2)) * y_sec - - res_x = (self.len_x * (4 ** z_oom)) / self.im_width - res_y = (self.len_y * (2 ** z_oom)) / self.im_height - - # Note: for now, len_x and len_y are stable and don't change dynamically. - # Hence there's no translation regarding their values here. - - return fov_x, fov_y, res_x, res_y - - def get_visual_input(self, fov_x, fov_y, res_x, res_y, len_x, len_y, label): - """ - Spans an image plane ( of size ... 
), selects a patch on that image plane - starting from (fov_x, fov_y) and of size (len_x, len_y) and raytraces - in the Minecraft block world to fill that patch with block type values - of a 2D perspective projection. - - Order of traversal: left to right, top to bottom ( before rotation ); - that is fov_00_00 gets the top left pixel. - """ - if res_x == 0.0 or res_y == 0.0 or len_x == 0.0 or len_y == 0.0: - return - - # get agent position - pos_x = self.spockplugin.clientinfo.position['x'] - pos_y = self.spockplugin.clientinfo.position['y'] + 0.620 # add some stance to y pos ( which is ground + 1 ) - pos_z = self.spockplugin.clientinfo.position['z'] - - # get yaw and pitch ( in degrees ) - yaw = self.spockplugin.clientinfo.position['yaw'] - pitch = self.spockplugin.clientinfo.position['pitch'] - - # compute ticks per dimension - tick_w = self.cam_width / self.im_width / res_x - tick_h = self.cam_height / self.im_height / res_y - - # span image plane - # the horizontal plane is split half-half, the vertical plane is shifted upwards - h_line = [i for i in self.frange(pos_x - 0.5 * self.cam_width, pos_x + 0.5 * self.cam_width, tick_w)] - v_line = [i for i in self.frange(pos_y - 0.05 * self.cam_height, pos_y + 0.95 * self.cam_height, tick_h)] - - # scale up fov_x, fov_y - which is originally in the domain [0,1] - # fov_x = int(round(fov_x * (self.im_width * res_x - len_x))) - # fov_y = int(round(fov_y * (self.im_height * res_y - len_y))) - fov_x = int(round(fov_x * len(h_line))) - fov_y = int(round(fov_y * len(v_line))) - - x0, y0, z0 = pos_x, pos_y, pos_z # agent's position aka projective point - zi = z0 + self.focal_length - - v_line.reverse() # inline - - # do raytracing to compute the resp. 
block type values of a 2D perspective projection - sensor_values = [] - for i in range(len_x): - for j in range(len_y): - try: - block_type, distance = self.project(h_line[fov_x + j], v_line[fov_y + i], zi, x0, y0, z0, yaw, pitch) - except IndexError: - block_type, distance = -1, -1 - self.logger.warning("IndexError at (%d,%d)" % (fov_x + j, fov_y + i)) - sensor_values.append(block_type) - - # homogeneous_patch = False - # if sensor_values[1:] == sensor_values[:-1]: # if all sensor values are the same, ignore the sample ie. write zeros - # homogeneous_patch = True - # norm_sensor_values = [0.0] * len_x * len_y - - # preprocess sensor values - # # BINARIZE - # # convert block types into binary values: map air and emptiness to black (0), everything else to white (1) - # sensor_values_ = [0.0 if v <= 0 else 1.0 for v in sensor_values] - # GRAY-SCALE VALUES - from .structs import block_colors - # fetch RGB value, convert it to gray-scale value in YUV space - sensor_values_ = [] - for bt in sensor_values: - red, green, blue = block_colors[str(bt)] - red, green, blue = red / 255., green / 255., blue / 255. 
# normalize to [0, 1] - # RGB to Y transform - y = 0.299 * red + 0.587 * green + 0.114 * blue # digital CCIR601 - sensor_values_.append(y) - - # normalize the sensor values - norm_sensor_values = self.normalize_sensor_values(sensor_values_) - - # write new sensor values to datasources - self.write_visual_input_to_datasources(norm_sensor_values, len_x, len_y) - - if 'record_vision' in cfg['minecraft']: - # do *not* record homogeneous and replayed patches - if not self.simulated_vision: # if not homogeneous_patch and not self.simulated_vision: - if label == self.current_loco_node['name']: - data = "{0}".format(",".join(str(b) for b in sensor_values)) - self.record_file.write("%s,%s,%d,%d,%d,%d,%.3f,%.3f,%d,%d\n" % - (data, label, pitch, yaw, fov_x, fov_y, res_x, res_y, len_x, len_y)) - else: - self.logger.warn('potentially corrupt data were ignored') - - def simulate_visual_input(self, len_x, len_y): - """ - Every steps read the next line - from the vision file and fill its values into fov__*_* datasources. 
- """ - if self.world.current_step % self.num_steps_to_keep_vision_stable == 0: - line = None - if self.simulated_vision_data is None: - line = next(self.simulated_vision_datareader, None) - if line is None: - self.logger.info("Simulating vision from data file, starting over...") - import csv - self.simulated_vision_datareader = csv.reader(open(self.simulated_vision_datafile)) - line = next(self.simulated_vision_datareader) - line = [float(entry) for entry in line] - else: - self.simulated_data_entry_index += 1 - if self.simulated_data_entry_index > self.simulated_data_entry_max: - self.logger.info("Simulating vision from memory, starting over, %s entries.", self.simulated_data_entry_max + 1) - self.simulated_data_entry_index = 0 - line = self.simulated_vision_data[self.simulated_data_entry_index] - self.write_visual_input_to_datasources(line, len_x, len_y) - - def write_visual_input_to_datasources(self, sensor_values, len_x, len_y): - """ - Write computed fovea sensor values to the respective datasources fov__*_*. - """ - for x in range(len_x): - for y in range(len_y): - name = 'fov__%02d_%02d' % (x, y) - self.datasources[name] = sensor_values[(len_y * x) + y] - - def normalize_sensor_values(self, patch): - """ - Normalize sensor values to zero mean and 3 standard deviation. - TODO: make doc correct and precise. - """ - # normalize block type values - # subtract the sample mean from each of its pixels - mean = float(sum(patch)) / len(patch) - patch_avg = [x - mean for x in patch] # TODO: throws error in ipython - why not here !? - - # truncate to +/- 3 standard deviations and scale to -1 and +1 - - var = [x ** 2.0 for x in patch_avg] - std = (sum(var) / len(var)) ** 0.5 # ASSUMPTION: all values of x are equally likely - pstd = 3.0 * std - # if block types are all the same number, eg. 
-1, std will be 0, therefore - if pstd == 0.0: - patch_std = [0.0 for x in patch_avg] - else: - patch_std = [max(min(x, pstd), -pstd) / pstd for x in patch_avg] - - # scale from [-1,+1] to [0.1,0.9] and write values to sensors - patch_resc = [(1.0 + x) * 0.4 + 0.1 for x in patch_std] - return patch_resc - - def collect_visual_data(self): - """ Collect the visual data for the current fovea position - Resets the data if fovea-position is at 0/0 - """ - # if it's the top-left fovea actor, reset the visual field by emptying the buffer - # ( background: this method only works with scanning for now; scanning starts at - # fov_act__00_00; so if that's the current fovea actor, it's time for a new plot ) - if self.fovea_actor == 'fov_act__00_00': - self.visual_field = {} - - # if values for this position in the grid exist already, return - if self.fovea_actor in self.visual_field: - return - - keys = sorted(list(self.datasources.keys())) - activations = [self.datasources[key] for key in keys if key.startswith('fov__')] - - self.visual_field[self.fovea_actor] = activations - - def plot_visual_field(self): - """ - Visualize the entire visual field of the agent at a given position. - - Works only in combination with scanning for now because the plot is - generated only if all tiling_x times tiling_y patches are filled with - values starting from fov_act__00_00. - - TODO: refactor code such that a plot is always generated right before - locomotion with the patches that happened to have been sampled. - """ - - from micropsi_core.nodenet import vizapi - import os - import numpy as np - - # once every tile has been filled with content, plot the actual image - if len(set(self.visual_field.keys())) == (self.tiling_x * self.tiling_y): - - i = 0 - while True: - - filename_png = os.path.join( - os.path.dirname(os.path.realpath(__file__)), - "%s_%d.png" % (self.current_loco_node['name'], i)) - # ?? 
- if not os.path.exists(filename_png): - break - i += 1 - - # sort keys to get them into the correct order, cf. names are given - # left to right, top to bottom - sorted_keys = list(self.visual_field.keys()) - sorted_keys.sort() - - # collect values - A = np.zeros((len(sorted_keys), len([k for k in self.datasources.keys() if k.startswith('fov__')]))) - for i, key in enumerate(sorted_keys): - A[i, :] = np.array(self.visual_field[key]) - - (r, c) = A.shape - image = vizapi.NodenetPlot(plotsize=(7, 3)) - A = A.reshape(3, 7, int(np.sqrt(c)), int(np.sqrt(c))) - image.add_4d_matrix_plot(A, hspace=0, wspace=0, vmin=A.min(), vmax=A.max()) - - image.save_to_file(filename_png, transparent=True, dpi=300) - - def frange(self, start, end, step): - """ - Range for floats. - """ - while start < end: - yield start - start += step diff --git a/micropsi_core/world/minecraft/spockplugin.py b/micropsi_core/world/minecraft/spockplugin.py deleted file mode 100644 index dff1aee5..00000000 --- a/micropsi_core/world/minecraft/spockplugin.py +++ /dev/null @@ -1,174 +0,0 @@ -import logging -from spock.mcmap import smpmap -from spock.mcp import mcdata, mcpacket -from spock.mcp.mcpacket import Packet -from spock.utils import pl_announce - - -STANCE_ADDITION = 1.620 -STEP_LENGTH = 1.0 -JUMPING_MAGIC_NUMBER = 0 # 2 used to work - - -@pl_announce('Micropsi') -class MicropsiPlugin(object): - - def __init__(self, ploader, settings): - - # register required plugins - self.net = ploader.requires('Net') - self.event = ploader.requires('Event') - self.world = ploader.requires('World') - self.clientinfo = ploader.requires('ClientInfo') - self.threadpool = ploader.requires('ThreadPool') - - self.inventory = [] - self.quickslots = [] - - self.event.reg_event_handler( - (3, 0, 48), - self.update_inventory - ) - - self.worldadapter = None - - # make references between micropsi world and MicropsiPlugin - self.micropsi_world = settings['micropsi_world'] - self.micropsi_world.spockplugin = self - - def 
chat(self, message): - # else push chat message - self.net.push(Packet(ident='PLAY>Chat Message', data={'message': message})) - - def is_connected(self): - return self.net.connected and self.net.proto_state - - def dispatchMovement(self, move_x, move_z): - target_coords = self.get_int_coordinates() - if move_x: - target_coords['x'] += 1 - elif move_z: - target_coords['z'] += 1 - - ground_offset = 2 # assume impossible - y = target_coords['y'] - 1 # current block agent is standing on - - # check if the next step is possible: nothing in the way, height diff <= 1 - if self.get_block_type(target_coords['x'], y + 2, target_coords['z']) > 0: - ground_offset = 2 - elif self.get_block_type(target_coords['x'], y + 1, target_coords['z']) > 0 and \ - self.get_block_type(target_coords['x'], y + 3, target_coords['z']) <= 0: - ground_offset = 1 - elif self.get_block_type(target_coords['x'], y, target_coords['z']) > 0: - ground_offset = 0 - elif self.get_block_type(target_coords['x'], y - 1, target_coords['z']) > 0: - ground_offset = -1 - - if ground_offset < 2: - self.clientinfo.position['x'] = target_coords['x'] + .5 - self.clientinfo.position['y'] = target_coords['y'] + ground_offset - self.clientinfo.position['stance'] = target_coords['y'] + ground_offset + STANCE_ADDITION - self.clientinfo.position['z'] = target_coords['z'] + .5 - self.clientinfo.position['on_ground'] = True - - def get_block_type(self, x, y, z): - """ - Get the block type of a particular voxel. 
- """ - x, y, z = int(x), int(y), int(z) - x, rx = divmod(x, 16) - y, ry = divmod(y, 16) - z, rz = divmod(z, 16) - - if y > 0x0F: - return -1 # was 0 - try: - column = self.world.columns[(x, z)] - chunk = column.chunks[y] - except KeyError: - return -1 - - if chunk is None: - return -1 # was 0 - return chunk.block_data.get(rx, ry, rz) >> 4 - - def get_biome_info(self, pos=None): - from spock.mcmap.mapdata import biomes - if pos is None: - pos = self.get_int_coordinates() - key = (pos['x'] // 16, pos['z'] // 16) - columns = self.world.columns - if key not in columns: - return None - current_column = columns[key] - biome_id = current_column.biome.get(pos['x'] % 16, pos['z'] % 16) - if biome_id >= 0: - return biomes[biome_id] - else: - return None - - def get_temperature(self, pos=None): - if pos is None: - pos = self.get_int_coordinates() - biome = self.get_biome_info(pos=pos) - if biome: - temp = biome['temperature'] - if pos['y'] > 64: - temp -= (0.00166667 * (pos['y'] - 64)) - return temp - else: - return None - - def eat(self): - """ Attempts to eat the held item. 
Assumes held item implements eatable """ - self.worldadapter.logger.debug('eating a bread') - data = { - 'location': self.get_int_coordinates(), - 'direction': -1, - 'held_item': { - 'id': 297, - 'amount': 1, - 'damage': 0 - }, - 'cur_pos_x': -1, - 'cur_pos_y': -1, - 'cur_pos_z': -1 - } - self.net.push(Packet(ident='PLAY>Player Block Placement', data=data)) - - def give_item(self, item, amount=1): - message = "/item %s %d" % (str(item), amount) - self.net.push(Packet(ident='PLAY>Chat Message', data={'message': message})) - - def update_inventory(self, event, packet): - # 0 = crafting output - # 1-4 = crafting ingredients - # 5-8 = wearables from helm to boot - # 9-35 = inventory by rows - # 36-44 = quickslots - self.inventory = packet.data['slots'] - self.quickslots = packet.data['slots'][36:45] - - def count_inventory_item(self, item): - count = 0 - for slot in self.inventory: - if slot and slot['id'] == item: - count += slot['amount'] - return count - - def change_held_item(self, target_slot): - """ Changes the held item to a quick inventory slot """ - self.net.push(Packet(ident='PLAY>Held Item Change', data={'slot': target_slot})) - - def move(self, position=None): - if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE): - return - # writes new data to clientinfo which is pulled and pushed to Minecraft by ClientInfoPlugin - self.clientinfo.position = position - - def get_int_coordinates(self): - return { - 'x': int(self.clientinfo.position['x']), - 'y': int(self.clientinfo.position['y']), - 'z': int(self.clientinfo.position['z']) - } diff --git a/micropsi_core/world/minecraft/structs.py b/micropsi_core/world/minecraft/structs.py deleted file mode 100644 index 1f2aa4a8..00000000 --- a/micropsi_core/world/minecraft/structs.py +++ /dev/null @@ -1,401 +0,0 @@ -block_names = { - "-1": "Emptiness", - "0": "Air", - "1": "Stone", - "2": "Grass", - "3": "Dirt", - "4": "Cobblestone", - "5": "Oak_Wood_Planks", - "6": "Sapling", - "7": "Bedrock", - "8": 
"Water", - "9": "Water", - "10": "Lava", - "11": "Lava", - "12": "Sand", - "13": "Gravel", - "14": "Gold_Ore", - "15": "Iron_Ore", - "16": "Coal_Ore", - "17": "Wood", - "18": "Leaves", - "19": "Sponge", - "20": "Glass", - "21": "Lapis_Lazuli_Ore", - "22": "Lapis_Lazuli_(Block)", - "23": "Dispenser", - "24": "Sandstone", - "25": "Note_Block", - "26": "Bed", - "27": "Powered_Rail_Off", - "28": "Detector_Rail", - "29": "Sticky_Piston", - "30": "WebBlock", - "31": "Tall_Grass", - "32": "Dead_Bush", - "33": "Piston", - "34": "Block_34", - "35": "White_Wool", - "37": "Dandelion", - "38": "Poppy", - "39": "Brown_Mushroom", - "40": "Red_Mushroom", - "41": "Gold_(Block)", - "42": "Iron_(Block)", - "43": "Double_Stone_Slab", - "44": "Stone_Slab", - "45": "Brick_(Block)", - "46": "TNT", - "47": "Bookshelf", - "48": "Moss_Stone", - "49": "Obsidian", - "50": "Torch", - "51": "Fire", - "52": "Monster_Spawner", - "53": "Wooden_Stairs", - "54": "Chest", - "55": "Redstone_(Wire,Inventory)", - "56": "Diamond_Ore", - "57": "Diamond_(Block)", - "58": "Crafting_Table", - "59": "Crops", - "60": "Farmland", - "61": "Furnace", - "62": "Furnace_(Active)", - "63": "Sign", - "64": "Wooden_Door", - "65": "Ladders", - "66": "Rails", - "67": "Cobblestone_Stairs", - "68": "Wall_Sign", - "69": "Lever", - "70": "Stone_Pressure_Plate", - "71": "Iron_Door", - "72": "Wooden_Pressure_Plate", - "73": "Redstone_Ore", - "74": "Redstone_Ore", - "75": "Redstone_(Torch,_Inactive)", - "76": "Redstone_(Torch,_Active)", - "77": "Stone_Button", - "78": "Snow", - "79": "Ice", - "80": "Snow_(Block)", - "81": "Cactus", - "82": "Clay_Block", - "83": "Sugar_Canes", - "84": "Jukebox", - "85": "Fence", - "86": "Pumpkin", - "87": "Netherrack", - "88": "Soul_Sand", - "89": "Glowstone_(Block)", - "90": "Portal", - "91": "Jack_o'Lantern", - "92": "Cake", - "93": "Redstone_(Repeater,_Inactive)", - "94": "Redstone_(Repeater,_Active)", - "95": "White_Stained_Glass", - "96": "Trapdoor", - "97": "Stone", - "98": "Stone_Brick", 
- "99": "BrownMushroomCap", - "100": "RedMushroomCap", - "101": "Iron_Bars", - "102": "Glass_Pane", - "103": "Melon_(Block)", - "104": "Seed_Stem", - "105": "Seed_Stem", - "106": "Vine", - "107": "Fence_Gate_(Closed)", - "108": "Brick_Stairs", - "109": "Stone_Brick_Stairs", - "110": "Mycelium", - "111": "Lily_Pad", - "112": "Nether_Brick", - "113": "Nether_Brick_Fence", - "114": "Nether_Brick_Stairs", - "115": "Nether_Wart", - "116": "Enchantment_Table", - "117": "Brewing_Stand", - "118": "Cauldron", - "119": "End_Portal", - "120": "End_Portal_Frame", - "121": "End_Stone", - "122": "Dragon_Egg", - "123": "Redstone_Lamp", - "124": "Redstone_Lamp_(Active)", - "125": "Oak_Wood_Planks", - "126": "Oak-Wood_Slab", - "127": "Cocoa_Plant", - "128": "Sandstone_Stairs", - "129": "Emerald_Ore", - "130": "Ender_Chest", - "131": "Tripwire_Hook", - "132": "Tripwire", - "133": "Block_of_Emerald", - "134": "Spruce_Wood_Stairs", - "135": "Birch_Wood_Stairs", - "136": "Jungle_Wood_Stairs", - "137": "Command_Block", - "138": "Beacon_Block", - "139": "Cobblestone_Wall", - "140": "Flower_Pot", - "141": "Carrot_(Block)", - "142": "Potatoes_(Block)", - "143": "Wooden_Button", - "144": "Skeleton_Skull", - "145": "Anvil", - "146": "Trapped_Chest", - "147": "Weighted_Pressure_Plate_(Light)", - "148": "Weighted_Pressure_Plate_(Heavy)", - "149": "Redstone_Comparator_(inactive)", - "150": "Redstone_Comparator_(active)", - "151": "Daylight_Sensor", - "152": "Block_of_Redstone", - "153": "Nether_Quartz_Ore", - "154": "Hopper", - "155": "Block_of_Quartz", - "156": "Quartz_Stairs", - "157": "Activator_Rail", - "158": "Dropper", - "159": "White_Stained_Clay", - "160": "White_Stained_Glass_Pane", - "161": "Leaves", - "162": "Acacia_Wood", - "163": "Acacia_Wood_Stairs", - "164": "Dark_Oak_Wood_Stairs", - "165": "Slime_Block", - "166": "Barrier", - "167": "Iron_Trapdoor", - "168": "Prismarine", - "169": "Sea_Lantern", - "170": "Hay_Block", - "171": "White_Carpet", - "172": "Hardened_Clay", - "173": 
"Block_of_Coal", - "174": "Packed_Ice", - "175": "Sunflower", - "176": "Free-standing_Banner_(Small)", - "177": "Wall-mounted_Banner_(Small)", - "178": "Inverted_Daylight_Sensor", - "179": "Red_Sandstone", - "180": "Red_Sandstone_Stairs", - "181": "Red_Sandstone", - "182": "Red_Sandstone_Slab", - "183": "Spruce_Fence_Gate_(Closed)", - "184": "Birch_Fence_Gate_(Closed)", - "185": "Jungle_Fence_Gate_(Closed)", - "186": "Dark_Oak_Fence_Gate_(Closed)", - "187": "Acacia_Fence_Gate_(Closed)", - "188": "Spruce_Fence", - "189": "Birch_Fence", - "190": "Jungle_Fence", - "191": "Dark_Oak_Fence", - "192": "Acacia_Fence", - "193": "Spruce_Door", - "194": "Birch_Door", - "195": "Jungle_Door", - "196": "Acacia_Door", - "197": "Dark_Oak_Door", -} - -block_colors = { - "-1": (250, 250, 250), # "Emptiness", - "0": (250, 250, 250), # (250, 250, 250), # "Air", - "1": (62, 62, 62), # "Stone", - "2": (90, 140, 80), # (), # "Grass", - "3": (150, 105, 75), # "Dirt", - "4": (106, 106, 106), # "Cobblestone", - "5": (145, 120, 75), # "Oak_Wood_Planks", - "6": (75, 205, 37), # "Sapling", - "7": (39, 39, 39), # "Bedrock", - "8": (74, 90, 206), # "Water", - "9": (74, 90, 206), # "Water", - "10": (194, 70, 11), # "Lava", - "11": (194, 70, 11), # "Lava", - "12": (206, 197, 152), # "Sand", - "13": (108, 108, 108), # "Gravel", - "14": (121, 121, 121), # "Gold_Ore", - "15": (118, 118, 118), # "Iron_Ore", - "16": (75, 75, 75), # "Coal_Ore", - "17": (60, 47, 29), # "Wood", - "18": (53, 132, 19), # "Leaves", - "19": (190, 191, 80), # "Sponge", - "20": (151, 179, 183), # "Glass", - "21": (65, 92, 146), # "Lapis_Lazuli_Ore", - "22": (31, 64, 141), # "Lapis_Lazuli_(Block)", - "23": (97, 97, 97), # "Dispenser", - "24": (212, 205, 158), # "Sandstone", - "25": (), # "Note_Block", - "26": (129, 19, 19), # "Bed", - "27": (), # "Powered_Rail_Off", - "28": (), # "Detector_Rail", - "29": (), # "Sticky_Piston", - "30": (), # "WebBlock", - "31": (59, 90, 51), # "Tall_Grass", - "32": (148, 100, 40), # "Dead_Bush", 
- "33": (), # "Piston", - "34": (), # "Block_34", - "35": (225, 225, 225), # "White_Wool", - "37": (234, 242, 6), # "Dandelion", - "38": (), # "Poppy", - "39": (114, 86, 67), # "Brown_Mushroom", - "40": (), # "Red_Mushroom", - "41": (), # "Gold_(Block)", - "42": (212, 212, 212), # "Iron_(Block)", - "43": (149, 149, 149), # "Double_Stone_Slab", - "44": (150, 150, 150), # "Stone_Slab", - "45": (), # "Brick_(Block)", - "46": (), # "TNT", - "47": (65, 53, 32), # "Bookshelf", - "48": (), # "Moss_Stone", - "49": (), # "Obsidian", - "50": (239, 167, 3), # "Torch", - "51": (190, 90, 4), # "Fire", - "52": (), # "Monster_Spawner", - "53": (142, 115, 71), # "Wooden_Stairs", - "54": (155, 105, 32), # "Chest", - "55": (), # "Redstone_(Wire,Inventory)", - "56": (), # "Diamond_Ore", - "57": (), # "Diamond_(Block)", - "58": (60, 49, 29), # "Crafting_Table", - "59": (138, 119, 18), # "Crops", - "60": (68, 47, 32), # "Farmland", - "61": (39, 39, 39), # "Furnace", - "62": (), # "Furnace_(Active)", - "63": (), # "Sign", - "64": (91, 72, 35), # "Wooden_Door", - "65": (145, 114, 67), # "Ladders", - "66": (), # "Rails", - "67": (90, 90, 90), # "Cobblestone_Stairs", - "68": (104, 85, 51), # "Wall_Sign", - "69": (), # "Lever", - "70": (), # "Stone_Pressure_Plate", - "71": (), # "Iron_Door", - "72": (), # "Wooden_Pressure_Plate", - "73": (), # "Redstone_Ore", - "74": (), # "Redstone_Ore", - "75": (), # "Redstone_(Torch,_Inactive)", - "76": (), # "Redstone_(Torch,_Active)", - "77": (), # "Stone_Button", - "78": (219, 235, 235), # "Snow", - "79": (125, 154, 204), # "Ice", - "80": (), # "Snow_(Block)", - "81": (10, 80, 20), # "Cactus", - "82": (), # "Clay_Block", - "83": (137, 187, 88), # "Sugar_Canes", - "84": (), # "Jukebox", - "85": (66, 53, 32), # "Fence", - "86": (), # "Pumpkin", - "87": (), # "Netherrack", - "88": (), # "Soul_Sand", - "89": (192, 148, 78), # "Glowstone_(Block)", - "90": (), # "Portal", - "91": (), # "Jack_o'Lantern", - "92": (), # "Cake", - "93": (), # 
"Redstone_(Repeater,_Inactive)", - "94": (), # "Redstone_(Repeater,_Active)", - "95": (), # "White_Stained_Glass", - "96": (), # "Trapdoor", - "97": (), # "Stone", - "98": (78, 78, 78), # "Stone_Brick", - "99": (), # "BrownMushroomCap", - "100": (), # "RedMushroomCap", - "101": (74, 74, 74), # "Iron_Bars", - "102": (201, 228, 233), # "Glass_Pane", - "103": (), # "Melon_(Block)", - "104": (), # "Seed_Stem", - "105": (), # "Seed_Stem", - "106": (44, 111, 15), # "Vine", - "107": (51, 43, 27), # "Fence_Gate_(Closed)", - "108": (), # "Brick_Stairs", - "109": (82, 82, 82), # "Stone_Brick_Stairs", - "110": (), # "Mycelium", - "111": (), # "Lily_Pad", - "112": (), # "Nether_Brick", - "113": (), # "Nether_Brick_Fence", - "114": (), # "Nether_Brick_Stairs", - "115": (), # "Nether_Wart", - "116": (82, 210, 188), # "Enchantment_Table", - "117": (), # "Brewing_Stand", - "118": (), # "Cauldron", - "119": (), # "End_Portal", - "120": (), # "End_Portal_Frame", - "121": (), # "End_Stone", - "122": (), # "Dragon_Egg", - "123": (), # "Redstone_Lamp", - "124": (), # "Redstone_Lamp_(Active)", - "125": (), # "Oak_Wood_Planks", - "126": (), # "Oak-Wood_Slab", - "127": (), # "Cocoa_Plant", - "128": (199, 193, 142), # "Sandstone_Stairs", - "129": (), # "Emerald_Ore", - "130": (), # "Ender_Chest", - "131": (), # "Tripwire_Hook", - "132": (), # "Tripwire", - "133": (), # "Block_of_Emerald", - "134": (), # "Spruce_Wood_Stairs", - "135": (), # "Birch_Wood_Stairs", - "136": (), # "Jungle_Wood_Stairs", - "137": (), # "Command_Block", - "138": (), # "Beacon_Block", - "139": (), # "Cobblestone_Wall", - "140": (84, 45, 35), # "Flower_Pot", - "141": (), # "Carrot_(Block)", - "142": (3, 224, 26), # "Potatoes_(Block)", - "143": (), # "Wooden_Button", - "144": (), # "Skeleton_Skull", - "145": (26, 26, 26), # "Anvil", - "146": (), # "Trapped_Chest", - "147": (), # "Weighted_Pressure_Plate_(Light)", - "148": (), # "Weighted_Pressure_Plate_(Heavy)", - "149": (), # "Redstone_Comparator_(inactive)", - 
"150": (), # "Redstone_Comparator_(active)", - "151": (), # "Daylight_Sensor", - "152": (), # "Block_of_Redstone", - "153": (), # "Nether_Quartz_Ore", - "154": (), # "Hopper", - "155": (), # "Block_of_Quartz", - "156": (), # "Quartz_Stairs", - "157": (), # "Activator_Rail", - "158": (), # "Dropper", - "159": (), # "White_Stained_Clay", - "160": (), # "White_Stained_Glass_Pane", - "161": (72, 145, 41), # "Leaves", - "162": (), # "Acacia_Wood", - "163": (164, 87, 47), # "Acacia_Wood_Stairs", - "164": (), # "Dark_Oak_Wood_Stairs", - "165": (), # "Slime_Block", - "166": (), # "Barrier", - "167": (), # "Iron_Trapdoor", - "168": (), # "Prismarine", - "169": (), # "Sea_Lantern", - "170": (), # "Hay_Block", - "171": (), # "White_Carpet", - "172": (), # "Hardened_Clay", - "173": (), # "Block_of_Coal", - "174": (), # "Packed_Ice", - "175": (), # "Sunflower", - "176": (), # "Free-standing_Banner_(Small)", - "177": (), # "Wall-mounted_Banner_(Small)", - "178": (), # "Inverted_Daylight_Sensor", - "179": (), # "Red_Sandstone", - "180": (), # "Red_Sandstone_Stairs", - "181": (), # "Red_Sandstone", - "182": (), # "Red_Sandstone_Slab", - "183": (), # "Spruce_Fence_Gate_(Closed)", - "184": (), # "Birch_Fence_Gate_(Closed)", - "185": (), # "Jungle_Fence_Gate_(Closed)", - "186": (), # "Dark_Oak_Fence_Gate_(Closed)", - "187": (), # "Acacia_Fence_Gate_(Closed)", - "188": (), # "Spruce_Fence", - "189": (), # "Birch_Fence", - "190": (), # "Jungle_Fence", - "191": (), # "Dark_Oak_Fence", - "192": (), # "Acacia_Fence", - "193": (), # "Spruce_Door", - "194": (), # "Birch_Door", - "195": (), # "Jungle_Door", - "196": (), # "Acacia_Door", - "197": (), # "Dark_Oak_Door", -} diff --git a/micropsi_core/world/timeseries/timeseries.py b/micropsi_core/world/timeseries/timeseries.py deleted file mode 100644 index f1fc158a..00000000 --- a/micropsi_core/world/timeseries/timeseries.py +++ /dev/null @@ -1,198 +0,0 @@ -""" -Worlds and bodies for agents whose habitats are ordered sequences of vectors. 
-""" -import os -from configuration import config as cfg -from micropsi_core.world.world import World -from micropsi_core.world.worldadapter import WorldAdapter, ArrayWorldAdapter -import numpy as np -from datetime import datetime - - -class TimeSeries(World): - """ A world that cycles through a fixed time series loaded from a file. - The file should be a numpy archive with the following fields: - 'data': numpy array of shape (nr of ids) x (nr of timestamps) - 'timestamps', a list of timestamps - the legend for the data's second axis - 'ids': a list of IDs - the legend for data's first axis. - """ - supported_worldadapters = ['TimeSeriesRunner'] - - assets = { - 'js': "timeseries/timeseries.js", - 'template': 'timeseries/timeseries.tpl' - } - - def __init__(self, filename, world_type="TimeSeries", name="", owner="", engine=None, uid=None, version=1, config={}): - World.__init__(self, filename, world_type=world_type, name=name, owner=owner, uid=uid, version=version, config=config) - - self.data['assets'] = self.assets - - filename = config.get('time_series_data_file', "timeseries.npz") - if os.path.isabs(filename): - path = filename - else: - path = os.path.join(cfg['micropsi2']['data_directory'], filename) - self.logger.info("loading timeseries from %s for world %s" % (path, uid)) - - self.realtime_per_entry = int(config['realtime_per_entry']) - self.last_realtime_step = datetime.utcnow().timestamp() * 1000 - - try: - with np.load(path) as f: - self.timeseries = f['data'] - self.ids = f['ids'] - self.timestamps = f['timestamps'] - except IOError as error: - self.logger.error("Could not load data file %s, error was: %s" % (path, str(error))) - self.ids = [0] - self.timeseries[[0, 0, 0]] - self.timestamps = [0] - self.len_ts = 1 - return - - # todo use the new configurable world options. 
- dummydata = config['dummy_data'] == "True" - z_transform = config['z_transform'] == "True" - clip_and_scale = config['clip_and_scale'] == "True" - sigmoid = config['sigmoid'] == "True" - self.shuffle = config['shuffle'] == "True" - - if clip_and_scale and sigmoid: - self.logger.warn("clip_and_scale and sigmoid cannot both be configured, choosing sigmoid") - clip_and_scale = False - - def sigm(X): - """ sigmoid that avoids float overflows for very small inputs. - expects a numpy float array. - """ - cutoff = np.log(np.finfo(X.dtype).max) - 1 - X[np.nan_to_num(X) <= -cutoff] = -cutoff - return 1. / (1. + np.exp(-X)) - - if (z_transform or clip_and_scale or sigmoid) and not dummydata: - data_z = np.empty_like(self.timeseries) - data_z[:] = np.nan - pstds = [] - for i, row in enumerate(self.timeseries): - if not np.all(np.isnan(row)): - std = np.sqrt(np.nanvar(row)) - if std > 0: - if not clip_and_scale: - row_z = (row - np.nanmean(row)) / std - if clip_and_scale: - row_z = row - np.nanmean(row) - pstd = std * 4 - row_z[np.nan_to_num(row_z) > pstd] = pstd - row_z[np.nan_to_num(row_z) < -pstd] = -pstd - row_z = ((row_z / pstd) + 1) * 0.5 - data_z[i,:] = row_z - self.timeseries = data_z if not sigmoid else sigm(data_z) - - if dummydata: - self.logger.warn("! Using dummy data") - n_ids = self.timeseries.shape[0] - self.timeseries = np.tile(np.random.rand(n_ids,1),(1,10)) - - self.len_ts = self.timeseries.shape[1] - - # todo: option to use only a subset of the data (e.g. 
for training/test) - - def step(self): - now = datetime.utcnow().timestamp() * 1000 - if now - self.realtime_per_entry > self.last_realtime_step: - self.current_step += 1 - for uid in self.agents: - with self.agents[uid].datasource_lock: - self.agents[uid].update() - self.last_realtime_step = now - - @property - def state(self): - t = (self.current_step - 1) % self.len_ts - if self.shuffle: - if t == 0: - idxs = np.arange(self.len_ts) - self.permutation = np.random.permutation(idxs) - t = self.permutation[t] - return self.timeseries[:, t] - - @staticmethod - def get_config_options(): - """ Returns a list of configuration-options for this world. - Expected format: - [{ - 'name': 'param1', - 'description': 'this is just an example', - 'options': ['value1', 'value2'], - 'default': 'value1' - }] - description, options and default are optional settings - """ - return [ - {'name': 'time_series_data_file', - 'description': 'The data file with the time series', - 'default': 'timeseries.npz'}, - {'name': 'shuffle', - 'description': 'Randomize order of presentation', - 'default': 'True', - 'options': ["True", "False"]}, - {'name': 'z_transform', - 'description': 'For each ID, center on mean & normalize by standard deviation', - 'default': 'False', - 'options': ["True", "False"]}, - {'name': 'clip_and_scale', - 'description': 'For each ID, center on mean & clip to 4 standard deviations and rescale to [0,1].', - 'default': 'False', - 'options': ["True", "False"]}, - {'name': 'sigmoid', - 'description': 'For each ID, z-transform and apply a sigmoid activation function', - 'default': 'False', - 'options': ["True", "False"]}, - {'name': 'realtime_per_entry', - 'description': 'Present each pattern from the data for this number of milliseconds', - 'default': '0'}, - {'name': 'dummy_data', - 'description': 'Present the same random pattern in each step (instead of the actual time series data)', - 'default': 'False', - 'options': ["True", "False"]} - ] - - def set_user_data(self, 
data): - """ Allow the user to set the step of this world""" - if 'step' in data: - self.last_realtime_step = datetime.utcnow().timestamp() * 1000 - self.current_step = data['step'] - for uid in self.agents: - with self.agents[uid].datasource_lock: - self.agents[uid].update() - - def get_world_view(self, step): - return { - 'first_timestamp': self.timestamps[0].isoformat(), - 'last_timestamp': self.timestamps[-1].isoformat(), - 'total_timestamps': len(self.timestamps), - 'current_timestamp': self.timestamps[self.current_step].isoformat(), - 'current_step': self.current_step, - } - - -class TimeSeriesRunner(ArrayWorldAdapter): - - def __init__(self, world, uid=None, **data): - super().__init__(world, uid, **data) - - self.available_datatargets = [] - self.available_datasources = ["update"] - - for idx, ID in enumerate(self.world.ids): - self.available_datasources.append(str(ID)) - - def get_available_datasources(self): - return self.available_datasources - - def get_available_datatargets(self): - return self.available_datatargets - - def update_data_sources_and_targets(self): - self.datasource_values = self.world.state \ No newline at end of file diff --git a/micropsi_core/world/world.py b/micropsi_core/world/world.py index 39503e3d..cee4a2c4 100644 --- a/micropsi_core/world/world.py +++ b/micropsi_core/world/world.py @@ -19,7 +19,7 @@ import logging -WORLD_VERSION = 1.0 +WORLD_VERSION = 1 class World(object): @@ -58,15 +58,11 @@ def current_step(self, current_step): self.data['current_step'] = current_step @property - def is_active(self): - return self.data.get("is_active", False) + def config(self): + return self.data['config'] - @is_active.setter - def is_active(self, is_active): - self.data['is_active'] = is_active - - @staticmethod - def get_config_options(): + @classmethod + def get_config_options(cls): """ Returns a list of configuration-options for this world. 
Expected format: [{ @@ -79,7 +75,16 @@ def get_config_options(): """ return [] - supported_worldadapters = ['Default'] + @classmethod + def get_supported_worldadapters(cls): + folder = cls.__module__.split('.') + folder.pop() + folder = '.'.join(folder) + return {wacls.__name__: wacls for wacls in tools.itersubclasses(worldadapter.WorldAdapter, folder=folder) if wacls.__name__ in cls.supported_worldadapters} + + supported_worldadapters = [] + supported_worldobjects = [] + is_realtime = False def __init__(self, filename, world_type="", name="", owner="", uid=None, engine=None, version=WORLD_VERSION, config={}): """Create a new MicroPsi world environment. @@ -91,27 +96,23 @@ def __init__(self, filename, world_type="", name="", owner="", uid=None, engine= uid (optional): unique handle of the world; if none is given, it will be generated """ - self.logger = logging.getLogger('world_logger') + self.logger = logging.getLogger('world') # persistent data self.data = { - "version": WORLD_VERSION, # used to check compatibility of the world data + "version": version, # used to check compatibility of the world data "objects": {}, "agents": {}, "current_step": 0, "config": config } - + self.is_active = False folder = self.__module__.split('.') folder.pop() folder = '.'.join(folder) - self.supported_worldadapters = { cls.__name__:cls for cls in tools.itersubclasses(worldadapter.WorldAdapter, folder=folder) if cls.__name__ in self.supported_worldadapters } - self.supported_worldobjects = { cls.__name__:cls for cls in tools.itersubclasses(worldobject.WorldObject, folder=folder) - if cls.__name__ not in self.supported_worldadapters} - # freaky hack. 
- self.supported_worldobjects.pop('WorldAdapter', None) - self.supported_worldobjects['Default'] = worldobject.WorldObject + self.supported_worldadapters = {name: cls for name, cls in micropsi_core.runtime.worldadapter_classes.items() if name in self.supported_worldadapters} + self.supported_worldobjects = {name: cls for name, cls in micropsi_core.runtime.worldobject_classes.items() if name in self.supported_worldobjects} self.uid = uid or generate_uid() self.owner = owner @@ -124,52 +125,52 @@ def __init__(self, filename, world_type="", name="", owner="", uid=None, engine= self.load() - def load(self, string=None): - """Load the world state from a file - - Arguments: - string (optional): if given, the world state is taken from the string instead. - """ + def load(self): + """ Load the world state from persistance """ # try to access file - if string: - try: - self.data.update(json.loads(string)) - except ValueError: - self.logger.warn("Could not read world data from string") - return False - else: - try: - with open(self.filename) as file: - self.data.update(json.load(file)) - except ValueError: - self.logger.warn("Could not read world data") - return False - except IOError: - self.logger.warn("Could not open world file: " + self.filename) + try: + with open(self.filename, encoding="utf-8") as file: + self.data.update(json.load(file)) + except ValueError: + self.logger.warning("Could not read world data") + return False + except IOError: + self.logger.warning("Could not open world file: " + self.filename) self.data['world_type'] = self.__class__.__name__ if "version" in self.data and self.data["version"] == WORLD_VERSION: self.initialize_world() return True else: - self.logger.warn("Wrong version of the world data") + self.logger.warning("Wrong version of the world data") return False + def simulation_started(self): + self.is_active = True + + def simulation_stopped(self): + self.is_active = False + def get_available_worldadapters(self): """ return the list of 
instantiated worldadapters """ return self.supported_worldadapters - def initialize_world(self): + def initialize_world(self, data=None): """Called after reading new world data. Parses the nodenet data and set up the non-persistent data structures necessary for efficient computation of the world """ - for uid, object_data in self.data['objects'].copy().items(): + if data is None: + data = self.data + for uid, object_data in data['objects'].copy().items(): if object_data['type'] in self.supported_worldobjects: self.objects[uid] = self.supported_worldobjects[object_data['type']](self, **object_data) else: - self.logger.warn('Worldobject of type %s not supported anymore. Deleting object of this type.' % object_data['type']) - del self.data['objects'][uid] + self.logger.warning('Worldobject of type %s not supported anymore. Deleting object of this type.' % object_data['type']) + del data['objects'][uid] + for uid in list(self.data['agents']): + if uid not in micropsi_core.runtime.nodenet_data: + del self.data['agents'][uid] def step(self): """ advance the simluation """ @@ -234,7 +235,7 @@ def get_world_objects(self, type=None): objects[uid] = obj return objects - def register_nodenet(self, worldadapter, nodenet_uid, nodenet_name=None): + def register_nodenet(self, worldadapter, nodenet_uid, nodenet_name=None, config={}): """Attempts to register a nodenet at this world. Returns True, spawned_agent_instance if successful, @@ -248,11 +249,11 @@ def register_nodenet(self, worldadapter, nodenet_uid, nodenet_name=None): world definition itself. 
""" if nodenet_uid in self.agents: - if self.agents[nodenet_uid].__class__.__name__ == worldadapter: - return True, self.agents[nodenet_uid] - else: + if self.agents[nodenet_uid].__class__.__name__ != worldadapter: return False, "Nodenet agent already exists in this world, but has the wrong type" - return self.spawn_agent(worldadapter, nodenet_uid, nodenet_name=nodenet_name) + elif config == self.agents[nodenet_uid].config: + return True, self.agents[nodenet_uid] + return self.spawn_agent(worldadapter, nodenet_uid, nodenet_name=nodenet_name, config=config) def unregister_nodenet(self, nodenet_uid): """Removes the connection between a nodenet and its incarnation in this world; may remove the corresponding @@ -265,7 +266,7 @@ def unregister_nodenet(self, nodenet_uid): if nodenet_uid in self.data['agents']: del self.data['agents'][nodenet_uid] - def spawn_agent(self, worldadapter_name, nodenet_uid, **options): + def spawn_agent(self, worldadapter_name, nodenet_uid, nodenet_name=None, config={}): """Creates an agent object, Returns True, spawned_agent_instance if successful, @@ -275,8 +276,9 @@ def spawn_agent(self, worldadapter_name, nodenet_uid, **options): self.agents[nodenet_uid] = self.supported_worldadapters[worldadapter_name]( self, uid=nodenet_uid, - name=options.get('nodenet_name', worldadapter_name), - **options) + type=worldadapter_name, + name=nodenet_name or worldadapter_name, + config=config) return True, self.agents[nodenet_uid] else: self.logger.error("World %s does not support Worldadapter %s" % (self.name, worldadapter_name)) @@ -324,38 +326,17 @@ def set_agent_properties(self, uid, position=None, orientation=None, name=None, def set_user_data(self, data): """ Sets some data from the user. 
Implement this in your worldclass to allow the user to set certain properties of this world""" - pass + pass # pragma: no cover + + def signal_handler(self, *args): + """ stuff to do on sigint, sigabrt, etc""" + pass # pragma: no cover def __del__(self): - """Empty destructor""" + """ Empty destructor """ pass -# imports of individual world types: -try: - from micropsi_core.world.island import island -except ImportError as e: - sys.stdout.write("Could not import island world.\nError: %s \n\n" % e.msg) - -try: - from micropsi_core.world.island.structured_objects import structured_objects -except ImportError as e: - sys.stdout.write("Could not import island world / structured objects.\nError: %s \n\n" % e.msg) - -try: - from micropsi_core.world.minecraft import minecraft -except ImportError as e: - if e.msg == "No module named 'spock'": - # ignore silently - pass - else: - sys.stdout.write("Could not import minecraft world.\nError: %s \n\n" % e.msg) - -try: - from micropsi_core.world.timeseries import timeseries -except ImportError as e: - if e.msg == "No module named 'numpy'": - # ignore silently - pass - else: - sys.stdout.write("Could not import timeseries world.\nError: %s \n\n" % e.msg) +class DefaultWorld(World): + supported_worldadapters = ['Default', 'DefaultArray'] + supported_worldobjects = ['TestObject'] diff --git a/micropsi_core/world/worldadapter.py b/micropsi_core/world/worldadapter.py index 073e56f6..ecb7a9e5 100644 --- a/micropsi_core/world/worldadapter.py +++ b/micropsi_core/world/worldadapter.py @@ -2,7 +2,7 @@ Agent types provide an interface between agents (which are implemented in node nets) and environments, such as the MicroPsi world simulator. -At each agent cycle, the activity of this actor nodes are written to data targets within the agent type, +At each agent cycle, the activity of this actuator nodes are written to data targets within the agent type, and the activity of sensor nodes is determined by the values exposed in its data sources. 
At each world cycle, the value of the data targets is translated into operations performed upon the world, and the value of the data sources is updated according to sensory data derived from the world. @@ -19,11 +19,56 @@ __author__ = 'joscha' __date__ = '10.05.12' +import logging +import functools +import operator +from collections import OrderedDict from threading import Lock from micropsi_core.world.worldobject import WorldObject from abc import ABCMeta, abstractmethod +class WorldAdapterMixin(object): + + """ Superclass for modular world-adapter extensions that provide + functionality reusable in several worldadapters. See examples in vrep_world.py""" + + @classmethod + def get_config_options(cls): + """ returns an array of parameters that are needed + to configure this mixin """ + return [] + + def __init__(self, world, uid=None, config={}, **kwargs): + super().__init__(world, uid=uid, config=config, **kwargs) + + def initialize(self): + """ Called after a reset of the simulation """ + pass # pragma: no cover + + def reset_simulation_state(self): + """ Called on reset """ + pass # pragma: no cover + + def update_datasources_and_targets(self): + pass # pragma: no cover + + def write_to_world(self): + pass # pragma: no cover + + def read_from_world(self): + pass # pragma: no cover + + def on_simulation_started(self): + pass # pragma: no cover + + def on_simulation_paused(self): + pass # pragma: no cover + + def shutdown(self): + pass # pragma: no cover + + class WorldAdapter(WorldObject, metaclass=ABCMeta): """Transmits data between agent and environment. @@ -31,21 +76,45 @@ class WorldAdapter(WorldObject, metaclass=ABCMeta): takes care of translating between the world and these values at each world cycle. 
""" - def __init__(self, world, uid=None, **data): + @classmethod + def get_config_options(cls): + return [] + + @property + def generate_flow_modules(self): + return False + + def __init__(self, world, uid=None, config={}, **data): self.datasources = {} self.datatargets = {} + self.flow_datasources = OrderedDict() + self.flow_datatargets = OrderedDict() + self.flow_datatarget_feedbacks = OrderedDict() self.datatarget_feedback = {} self.datasource_lock = Lock() + self.config = config + self.nodenet = None # will be assigned by the nodenet once it's loaded WorldObject.__init__(self, world, category='agents', uid=uid, **data) + self.logger = logging.getLogger('agent.%s' % self.uid) + if data.get('name'): + self.data['name'] = data['name'] + for item in self.__class__.get_config_options(): + if item['name'] not in config: + config[item['name']] = item.get('default') + for key in config: + setattr(self, key, config[key]) def initialize_worldobject(self, data): - for key in self.datasources: - if key in data.get('datasources', {}): - self.datasources[key] = data['datasources'][key] - for key in self.datatargets: - if key in data.get('datatargets', {}): - self.datatargets[key] = data['datatargets'][key] - self.datatarget_feedback[key] = 0 + pass + + def add_datasource(self, name, initial_value=0.0): + """ add a datasource """ + self.datasources[name] = initial_value + + def add_datatarget(self, name, initial_value=0.0): + """ add a datatarget """ + self.datatargets[name] = initial_value + self.datatarget_feedback[name] = 0.0 def get_available_datasources(self): """returns a list of identifiers of the datasources available for this world adapter""" @@ -73,15 +142,20 @@ def set_datatarget_values(self, values): for i, key in enumerate(self.get_available_datatargets()): self.datatargets[key] = values[i] + def add_datatarget_values(self, values): + """allows the agent to add a list of values to the datatargets""" + for i, key in enumerate(self.get_available_datatargets()): + 
self.datatargets[key] += values[i] + def get_datatarget_feedback_value(self, key): - """get feedback whether the actor-induced action succeeded""" + """get feedback whether the actuator-induced action succeeded""" return self.datatarget_feedback.get(key, 0) def get_datatarget_feedback_values(self): """allows the agent to read all datasource values""" return [float(self.datatarget_feedback[x]) for x in self.get_available_datatargets()] - def set_datatarget_feedback(self, key, value): + def set_datatarget_feedback_value(self, key, value): """set feedback for the given datatarget""" self.datatarget_feedback[key] = value @@ -98,7 +172,7 @@ def reset_datatargets(self): @abstractmethod def update_data_sources_and_targets(self): """must be implemented by concrete world adapters to read datatargets and fill datasources""" - pass + pass # pragma: no cover def is_alive(self): """called by the world to check whether the agent has died and should be removed""" @@ -109,96 +183,258 @@ class Default(WorldAdapter): """ A default Worldadapter, that provides example-datasources and -targets """ - def __init__(self, world, uid=None, **data): - super().__init__(world, uid=uid, **data) - self.datasources = dict((s, 0) for s in ['static_on', 'random', 'static_off']) - self.datatargets = {'echo': 0} - self.datatarget_feedback = {'echo': 0} + @classmethod + def get_config_options(cls): + return [ + {'name': 'foo', + 'description': 'does nothing', + 'default': 'bar'} + ] + + def __init__(self, world, uid=None, config={}, **data): + super().__init__(world, uid=uid, config=config, **data) + for s in ['static_on', 'random', 'static_off']: + self.add_datasource(s, 0) + self.add_datatarget('echo', 0) self.update_data_sources_and_targets() def update_data_sources_and_targets(self): import random - if self.datatargets['echo'] != 0: - self.datatarget_feedback['echo'] = self.datatargets['echo'] + self.datatarget_feedback['echo'] = self.datatargets['echo'] self.datasources['static_on'] = 1 
self.datasources['random'] = random.uniform(0, 1) -class ArrayWorldAdapter(WorldAdapter, metaclass=ABCMeta): - """ - The ArrayWorldAdapter base class allows to avoid python dictionaries and loops for transmitting values - to nodenet engines. - Engines that bulk-query values, such as the theano_engine, will be faster. - Numpy arrays can be passed directly into the engine. - """ - def __init__(self, world, uid=None, **data): - WorldAdapter.__init__(self, world, duid=uid) - self.datasource_values = [] - self.datatarget_values = [] - self.datatarget_feedback_values = [] - - def get_datasource_value(self, key): - """allows the agent to read a value from a datasource""" - index = self.get_available_datasources().index(key) - return self.datasource_values[index] +try: + # Only available if numpy is installed + import numpy as np - def get_datasource_values(self): - """allows the agent to read all datasource values""" - return self.datasource_values + # configure dtype for value arrays. + # TODO: Move this and the config in theano_nodenet to one central point + from configuration import config as settings - def add_to_datatarget(self, key, value): - """allows the agent to write a value to a datatarget""" - index = self.get_available_datasources().index(key) - self.datatarget_values[index] += value - - def get_datatarget_feedback_value(self, key): - """get feedback whether the actor-induced action succeeded""" - index = self.get_available_datatargets().index(key) - return self.datatarget_feedback_values[index] - - def get_datatarget_feedback_values(self): - """allows the agent to read all datasource values""" - return self.datatarget_feedback_values - - def set_datatarget_feedback(self, key, value): - """set feedback for the given datatarget""" - index = self.get_available_datatargets().index(key) - self.datatarget_feedback_values[index] = value - - def set_datatarget_values(self, values): - """allows the agent to write a list of value to the datatargets""" - 
self.datatarget_values = values - - def reset_datatargets(self): - """ resets (zeros) the datatargets """ - pass - - @abstractmethod - def get_available_datasources(self): - """ - must be implemented by the concrete world adapater and return a list of datasource name strings, - in the same order as values returned by get_datasource_values() - """ - pass - - @abstractmethod - def get_available_datatargets(self): + class ArrayWorldAdapter(WorldAdapter, metaclass=ABCMeta): """ - must be implemented by the concrete world adapater and return a list of datatarget name strings, - in the same order as values returned by get_datatarget_feedback_values() + The ArrayWorldAdapter base class allows to avoid python dictionaries and loops for transmitting values + to nodenet engines. + Engines that bulk-query values, such as the theano_engine, will be faster. + Numpy arrays can be passed directly into the engine. """ - pass - @abstractmethod - def update_data_sources_and_targets(self): + @property + def generate_flow_modules(self): + return len(self.flow_datasources) or len(self.flow_datatargets) + + def __init__(self, world, uid=None, **data): + WorldAdapter.__init__(self, world, uid=uid, **data) + + precision = settings['theano']['precision'] + self.floatX = np.float32 + if precision == "64": + self.floatX = np.float64 + + self.datasource_names = [] + self.datatarget_names = [] + self.flow_datasources = OrderedDict() + self.flow_datatargets = OrderedDict() + self.flow_datatarget_feedbacks = OrderedDict() + self.datasource_values = np.zeros(0, dtype=self.floatX) + self.datatarget_values = np.zeros(0, dtype=self.floatX) + self.datatarget_feedback_values = np.zeros(0, dtype=self.floatX) + + def add_datasource(self, name, initial_value=0.): + """ Adds a datasource, and returns the index + where they were added""" + self.datasource_names.append(name) + self.datasource_values = np.concatenate((self.datasource_values, np.asarray([initial_value], dtype=self.floatX))) + return 
len(self.datasource_names) - 1 + + def add_datatarget(self, name, initial_value=0.): + """ Adds a datatarget, and returns the index + where they were added""" + self.datatarget_names.append(name) + self.datatarget_values = np.concatenate((self.datatarget_values, np.asarray([initial_value], dtype=self.floatX))) + self.datatarget_feedback_values = np.concatenate((self.datatarget_feedback_values, np.asarray([initial_value], dtype=self.floatX))) + return len(self.datatarget_names) - 1 + + def add_flow_datasource(self, name, shape, initial_values=None): + """ Add a high-dimensional datasource for flowmodules.""" + if initial_values is None: + initial_values = np.zeros(shape, dtype=self.floatX) + + self.flow_datasources[name] = initial_values + return self.flow_datasources[name] + + def add_flow_datatarget(self, name, shape, initial_values=None): + """ Add a high-dimensional datatarget for flowmodules""" + if initial_values is None: + initial_values = np.zeros(shape, dtype=self.floatX) + + self.flow_datatargets[name] = initial_values + self.flow_datatarget_feedbacks[name] = np.zeros_like(initial_values) + return self.flow_datatargets[name] + + def get_available_datasources(self): + """Returns a list of all datasource names""" + return self.datasource_names + + def get_available_datatargets(self): + """Returns a list of all datatarget names""" + return self.datatarget_names + + def get_available_flow_datasources(self): + return list(self.flow_datasources.keys()) + + def get_available_flow_datatargets(self): + return list(self.flow_datatargets.keys()) + + def get_datasource_index(self, name): + """Returns the index of the given datasource in the value array""" + return self.datasource_names.index(name) + + def get_datatarget_index(self, name): + """Returns the index of the given datatarget in the value array""" + return self.datatarget_names.index(name) + + def get_datasource_value(self, key): + """allows the agent to read a value from a datasource""" + index = 
self.get_datasource_index(key) + return self.datasource_values[index] + + def get_datatarget_value(self, key): + """allows the agent to read a value from a datatarget""" + index = self.get_datatarget_index(key) + return self.datatarget_values[index] + + def get_datatarget_feedback_value(self, key): + """allows the agent to read a value from a datatarget""" + index = self.get_datatarget_index(key) + return self.datatarget_feedback_values[index] + + def get_datasource_values(self): + """allows the agent to read all datasource values""" + return self.datasource_values + + def get_datatarget_values(self): + """allows the agent to read all datatarget values""" + return self.datatarget_values + + def get_datatarget_feedback_values(self): + """allows the agent to read all datatarget_feedback values""" + return self.datatarget_feedback_values + + def get_flow_datasource(self, name): + """ return the array/matrix for the given flow datasource""" + return self.flow_datasources[name] + + def get_flow_datatarget(self, name): + """ return the array/matrix for the given flow datatarget""" + return self.flow_datatargets[name] + + def get_flow_datatarget_feedback(self, name): + """ return the array/matrix for the given flow datatarget_feedback""" + return self.flow_datatarget_feedbacks[name] + + def set_datasource_value(self, key, value): + """Sets the given datasource value""" + idx = self.get_datasource_index(key) + self.datasource_values[idx] = value + + def set_datatarget_value(self, key, value): + """Sets the given datasource value""" + idx = self.get_datatarget_index(key) + self.datatarget_values[idx] = value + + def add_to_datatarget(self, key, value): + """Adds the given value to the given datatarget""" + idx = self.get_datatarget_index(key) + self.datatarget_values[idx] += value + + def set_datatarget_feedback_value(self, key, value): + """Sets the given datatarget_feedback value""" + idx = self.get_datatarget_index(key) + self.datatarget_feedback_values[idx] = value + + 
def set_flow_datasource(self, name, values): + """Set the values of the given flow_datasource """ + assert isinstance(values, np.ndarray), "must provide numpy array" + assert values.dtype == self.floatX + assert self.flow_datasources[name].shape == values.shape + self.flow_datasources[name] = values + + def add_to_flow_datatarget(self, name, values): + """Add the given values to the given flow_datatarget """ + assert isinstance(values, np.ndarray), "must provide numpy array" + assert values.dtype == self.floatX + assert self.flow_datatargets[name].shape == values.shape + self.flow_datatargets[name] += values + + def set_flow_datatarget_feedback(self, name, values): + """Set the values of the given flow_datatarget_feedback """ + assert isinstance(values, np.ndarray), "must provide numpy array" + assert values.dtype == self.floatX + assert self.flow_datatarget_feedbacks[name].shape == values.shape + self.flow_datatarget_feedbacks[name] = values + + def set_datasource_values(self, values): + """sets the complete datasources to new values""" + assert len(values) == len(self.datasource_values) + self.datasource_values = values + + def set_datatarget_values(self, values): + """sets the complete datatargets to new values""" + assert len(values) == len(self.datatarget_values) + self.datatarget_values = values + + def add_datatarget_values(self, values): + """sets the complete datatargets to new values""" + assert len(values) == len(self.datatarget_values) + self.datatarget_values += values + + def set_datatarget_feedback_values(self, values): + """sets the complete datatargets_feedback to new values""" + assert len(values) == len(self.datatarget_feedback_values) + self.datatarget_feedback_values = values + + def reset_datatargets(self): + """ resets (zeros) the datatargets """ + self.datatarget_values = np.zeros_like(self.datatarget_values) + for name in self.flow_datatargets: + self.flow_datatargets[name] = np.zeros_like(self.flow_datatargets[name]) + + @abstractmethod + 
def update_data_sources_and_targets(self): + """ + must be implemented by concrete world adapters to read and set the following arrays: + datasource_values + datatarget_values + datatarget_feedback_values + + Arrays sizes need to be equal to the corresponding responses of get_available_datasources() and + get_available_datatargets(). + Values of the superclass' dict objects will be bypassed and ignored. + """ + pass # pragma: no cover + + class DefaultArray(ArrayWorldAdapter): """ - must be implemented by concrete world adapters to read and set the following arrays: - datasource_values - datatarget_values - datatarget_feedback_values - - Arrays sizes need to be equal to the corresponding responses of get_available_datasources() and - get_available_datatargets(). - Values of the superclass' dict objects will be bypassed and ignored. + A default ArrayWorldadapter, that provides example-datasources and -targets """ - pass + def __init__(self, world, uid=None, config={}, **data): + super().__init__(world, uid=uid, config=config, **data) + self.add_datasource("test", initial_value=0) + self.add_flow_datasource("vision", (3, 7)) + self.add_datatarget("test", initial_value=0) + self.add_flow_datatarget("action", (2, 3)) + self.update_data_sources_and_targets() + + def update_data_sources_and_targets(self): + import random + self.datatarget_feedback_values[:] = self.datatarget_values + self.datasource_values[:] = np.random.randn(len(self.datasource_values)) + self.flow_datasources['vision'][:] = np.random.randn(*self.flow_datasources['vision'].shape) + self.flow_datatargets['action'][:] = np.zeros_like(self.flow_datatargets['action']) + + +except ImportError: # pragma: no cover + pass diff --git a/micropsi_core/world/worldobject.py b/micropsi_core/world/worldobject.py index ca4d611d..4089e77e 100644 --- a/micropsi_core/world/worldobject.py +++ b/micropsi_core/world/worldobject.py @@ -37,6 +37,14 @@ def name(self): def name(self, name): self.data['name'] = name + @property 
+ def parameters(self): + return self.data.get('parameters', {}) + + @parameters.setter + def parameters(self, parameters={}): + self.data['parameters'] = parameters + @property def uid(self): return self.data['uid'] @@ -61,3 +69,7 @@ def initialize_worldobject(self, data): def update(self): """ Called by the world at each world iteration """ pass + + +class TestObject(WorldObject): + pass diff --git a/micropsi_server/mesh_startup.py b/micropsi_server/mesh_startup.py new file mode 100644 index 00000000..8736209e --- /dev/null +++ b/micropsi_server/mesh_startup.py @@ -0,0 +1,21 @@ + +def no_exit(code): + pass + + +def mesh_startup(port=7543): + + import sys + from os import walk + + path = sys.path.copy() + for p in path: + for root,dirs,files in walk(p): + if p is not root: + sys.path.append(root) + + sys.exit = no_exit + + import micropsi_server.micropsi_app + + micropsi_server.micropsi_app.main(None, port) \ No newline at end of file diff --git a/micropsi_server/micropsi_app.py b/micropsi_server/micropsi_app.py index 10c22f60..3b458a96 100755 --- a/micropsi_server/micropsi_app.py +++ b/micropsi_server/micropsi_app.py @@ -7,6 +7,8 @@ This version of MicroPsi is meant to be deployed as a web server, and accessed through a browser. For local use, simply start this server and point your browser to "http://localhost:6543". The latter parameter is the default port and can be changed as needed. 
+ +The path to the JSON API is `/rpc` """ __author__ = 'joscha' @@ -44,33 +46,43 @@ bottle.TEMPLATE_PATH.insert(0, os.path.join(APP_PATH, 'view', '')) bottle.TEMPLATE_PATH.insert(1, os.path.join(APP_PATH, 'static', '')) +bottle.BaseRequest.MEMFILE_MAX = 5 * 1024 * 1024 + +theano_available = True +try: + import theano +except ImportError: + theano_available = False + +bottle.BaseTemplate.defaults['theano_available'] = theano_available + # runtime = micropsi_core.runtime.MicroPsiRuntime() usermanager = usermanagement.UserManager() def rpc(command, route_prefix="/rpc/", method="GET", permission_required=None): - """Defines a decorator for accessing API calls. Use it by specifying the - API method, followed by the permissions necessary to execute the method. - Within the calling web page, use http:///rpc/(arg1="val1", arg2="val2", ...) - Import these arguments into your decorated function: - @rpc("my_method") - def this_is_my_method(arg1, arg2): - pass - - This will return a JSON object, containing `status` and `data` - status will either be "success" or "error", and data can be either empty, contain the requested information, or the error message, if status==error - The decorated function can optionally import the following parameters (by specifying them in its signature): - argument: the original argument string - token: the current session token - user_id: the id of the user associated with the current session token - permissions: the set of permissions associated with the current session token - - Arguments: - command: the command against which we want to match - method (optional): the request method - permission_required (optional): the type of permission necessary to execute the method; - if omitted, permissions won't be tested by the decorator - """ + # Defines a decorator for accessing API calls. Use it by specifying the + # API method, followed by the permissions necessary to execute the method. 
+ # Within the calling web page, use http:///rpc/(arg1="val1", arg2="val2", ...) + # Import these arguments into your decorated function: + # @rpc("my_method") + # def this_is_my_method(arg1, arg2): + # pass + + # This will return a JSON object, containing `status` and `data` + # status will either be "success" or "error", and data can be either empty, contain the requested information, or the error message, if status==error + # The decorated function can optionally import the following parameters (by specifying them in its signature): + # argument: the original argument string + # token: the current session token + # user_id: the id of the user associated with the current session token + # permissions: the set of permissions associated with the current session token + + # Arguments: + # command: the command against which we want to match + # method (optional): the request method + # permission_required (optional): the type of permission necessary to execute the method; + # if omitted, permissions won't be tested by the decorator + def _decorator(func): @micropsi_app.route(route_prefix + command, "POST") @micropsi_app.route(route_prefix + command + "()", method) @@ -97,7 +109,12 @@ def _wrapper(argument=None): kwargs = request.json except ValueError: if len(request.params) > 0: - kwargs = dict((key.strip('[]'), json.loads(val)) for key, val in request.params.iteritems()) + try: + kwargs = dict((key.strip('[]'), json.loads(val)) for key, val in request.params.iteritems()) + except json.JSONDecodeError: + response.status = 400 + return {'status': 'error', 'data': "Malformed arguments for remote procedure call: %s" % str(request.params.__dict__)} + user_id, permissions, token = get_request_data() if permission_required and permission_required not in permissions: response.status = 401 @@ -105,7 +122,8 @@ def _wrapper(argument=None): else: # kwargs.update({"argument": argument, "permissions": permissions, "user_id": user_id, "token": token}) if kwargs is not None: - 
arguments = dict((name, kwargs[name]) for name in inspect.getargspec(func).args if name in kwargs) + signature = inspect.signature(func) + arguments = dict((name, kwargs[name]) for name in signature.parameters if name in kwargs) arguments.update(kwargs) else: arguments = {} @@ -123,7 +141,20 @@ def _wrapper(argument=None): response.status = 500 import traceback logging.getLogger('system').error("Error: " + str(err) + " \n " + traceback.format_exc()) - return {'status': 'error', 'data': str(err), 'traceback': traceback.format_exc()} + + # either drop to debugger in the offending stack frame, or just display a message and the trace. + on_exception = cfg['micropsi2'].get('on_exception', None) + if on_exception == 'debug': + import sys + # use the nice ipdb if it is there, but don't throw a fit if it isnt: + try: + import ipdb as pdb + except ImportError: + import pdb + _, _, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + return {'status': 'error', 'data': str(err), 'traceback': traceback.format_exc()} # except TypeError as err: # response.status = 400 @@ -133,7 +164,7 @@ def _wrapper(argument=None): def get_request_data(): - """Helper function to determine the current user, permissions and token""" + # Helper function to determine the current user, permissions and token if request.get_cookie("token"): token = request.get_cookie("token") else: @@ -153,14 +184,26 @@ def _add_world_list(template_name, **params): response.set_cookie('selected_world', current_world) else: current_world = request.get_cookie('selected_world') - if current_world in worlds and hasattr(worlds[current_world], 'assets'): - world_assets = worlds[current_world].assets - else: - world_assets = {} + world_type = "" + world_assets = {} + world_template = "" + if current_world: + world_obj = runtime.load_world(current_world) + world_type = world_obj.__class__.__name__ + if hasattr(world_obj, 'assets'): + world_assets = world_obj.assets + if 'template' in world_assets: + import inspect + basedir = 
os.path.dirname(inspect.getfile(world_obj.__class__)) + with open(os.path.join(basedir, world_assets['template'])) as fp: + world_template = template(fp.read(), world_assets=world_assets) return template(template_name, current=current_world, - mine=dict((uid, worlds[uid]) for uid in worlds if worlds[uid].owner == params['user_id']), - others=dict((uid, worlds[uid]) for uid in worlds if worlds[uid].owner != params['user_id']), - world_assets=world_assets, **params) + mine=dict((uid, worlds[uid]) for uid in worlds if worlds[uid].get('owner') == params['user_id']), + others=dict((uid, worlds[uid]) for uid in worlds if worlds[uid].get('owner') != params['user_id']), + world_type=world_type, + world_assets=world_assets, + world_template=world_template, + **params) @micropsi_app.route('/static/') @@ -168,14 +211,28 @@ def server_static(filepath): return static_file(filepath, root=os.path.join(APP_PATH, 'static')) +@micropsi_app.route('/world_assets//') +def server_static_world_asset(wtype, filepath): + import inspect + world = runtime.get_world_class_from_name(wtype, case_sensitive=False) + return static_file(filepath, root=os.path.dirname(inspect.getfile(world))) + + @micropsi_app.route("/") def index(): first_user = usermanager.users == {} user_id, permissions, token = get_request_data() - return _add_world_list("viewer", mode="all", first_user=first_user, logging_levels=runtime.get_logging_levels(), version=VERSION, user_id=user_id, permissions=permissions, console=INCLUDE_CONSOLE) + return _add_world_list("viewer", + mode="all", + first_user=first_user, + logging_levels=runtime.get_logging_levels(), + version=VERSION, + user_id=user_id, + permissions=permissions, + console=INCLUDE_CONSOLE) -@micropsi_app.route("/nodenet") +@micropsi_app.route("/agent") def nodenet(): user_id, permissions, token = get_request_data() return template("viewer", mode="nodenet", version=VERSION, user_id=user_id, permissions=permissions, console=INCLUDE_CONSOLE) @@ -187,14 +244,35 @@ def 
monitors(): return template("viewer", mode="monitors", logging_levels=runtime.get_logging_levels(), version=VERSION, user_id=user_id, permissions=permissions) +@micropsi_app.route('/minidoc') +def minidoc_base(): + return template("minidoc", + navi=minidoc.get_navigation(), + content=minidoc.get_documentation_body(), title="Minidoc") + + @micropsi_app.route('/minidoc/') -def document(filepath): +def minidoc_file(filepath): return template("minidoc", navi=minidoc.get_navigation(), content=minidoc.get_documentation_body(filepath), title="Minidoc: " + filepath) -@micropsi_app.route("/world") +@micropsi_app.route('/apidoc') +def apidoc_base(): + return template("minidoc", + navi=minidoc.get_api_navigation(), + content=minidoc.get_api_doc(), title="Api Documentation") + + +@micropsi_app.route('/apidoc/') +def apidoc_file(filepath): + return template("minidoc", + navi=minidoc.get_api_navigation(), + content=minidoc.get_api_doc(filepath), title="Api Documentation: " + filepath) + + +@micropsi_app.route("/environment") def world(): user_id, permissions, token = get_request_data() return _add_world_list("viewer", mode="world", version=VERSION, user_id=user_id, permissions=permissions) @@ -208,7 +286,7 @@ def error_page(error): "status": "error", "data": "Function not found" }) - return template("error.tpl", error=error, msg="Page not found.", img="/static/img/brazil.gif") + return template("error.tpl", error=error, msg="Page not found.") @micropsi_app.error(405) @@ -219,18 +297,18 @@ def error_page_405(error): "status": "error", "data": "Method not allowed" }) - return template("error.tpl", error=error, msg="Method not allowed.", img="/static/img/strangelove.gif") + return template("error.tpl", error=error, msg="Method not allowed.") @micropsi_app.error(500) def error_page_500(error): - return template("error.tpl", error=error, msg="Internal server error.", img="/static/img/brainstorm.gif") + return template("error.tpl", error=error, msg="Internal server error.") 
@micropsi_app.route("/about") def about(): user_id, permissions, token = get_request_data() - return template("about", version=VERSION, user_id=user_id, permissions=permissions) + return template("about", version=VERSION, user_id=user_id, permissions=permissions, config=runtime.runtime_info()) @micropsi_app.route("/logout") @@ -467,7 +545,7 @@ def login_as_user(userid): return template("error", msg="Insufficient rights to access user console") -@micropsi_app.route("/nodenet_mgt") +@micropsi_app.route("/agent_mgt") def nodenet_mgt(): user_id, permissions, token = get_request_data() if "manage nodenets" in permissions: @@ -478,7 +556,7 @@ def nodenet_mgt(): return template("nodenet_mgt", version=VERSION, permissions=permissions, user_id=user_id, nodenet_list=runtime.get_available_nodenets(), notification=notification) - return template("error", msg="Insufficient rights to access nodenet console") + return template("error", msg="Insufficient rights to access agent console") @micropsi_app.route("/select_nodenet_from_console/") @@ -486,7 +564,7 @@ def select_nodenet_from_console(nodenet_uid): user_id, permissions, token = get_request_data() result, uid = runtime.load_nodenet(nodenet_uid) if not result: - return template("error", msg="Could not select nodenet") + return template("error", msg="Could not select agent") response.set_cookie("selected_nodenet", nodenet_uid + "/", path="/") redirect("/") @@ -496,9 +574,9 @@ def delete_nodenet_from_console(nodenet_uid): user_id, permissions, token = get_request_data() if "manage nodenets" in permissions: runtime.delete_nodenet(nodenet_uid) - response.set_cookie('notification', '{"msg":"Nodenet deleted", "status":"success"}', path='/') - redirect('/nodenet_mgt') - return template("error", msg="Insufficient rights to access nodenet console") + response.set_cookie('notification', '{"msg":"Agent deleted", "status":"success"}', path='/') + redirect('/agent_mgt') + return template("error", msg="Insufficient rights to access agent 
console") @micropsi_app.route("/save_all_nodenets") @@ -507,161 +585,203 @@ def save_all_nodenets(): if "manage nodenets" in permissions: for uid in runtime.nodenets: runtime.save_nodenet(uid) - response.set_cookie('notification', '{"msg":"All nodenets saved", "status":"success"}', path='/') - redirect('/nodenet_mgt') - return template("error", msg="Insufficient rights to access nodenet console") + response.set_cookie('notification', '{"msg":"All agents saved", "status":"success"}', path='/') + redirect('/agent_mgt') + return template("error", msg="Insufficient rights to access agent console") -@micropsi_app.route("/nodenet/import") +@micropsi_app.route("/agent/import") def import_nodenet_form(): token = request.get_cookie("token") - return template("upload.tpl", title='Import Nodenet', message='Select a file to upload and use for importing', action='/nodenet/import', + return template("upload.tpl", title='Import Agent', message='Select a file to upload and use for importing', action='/agent/import', version=VERSION, userid=usermanager.get_user_id_for_session_token(token), permissions=usermanager.get_permissions_for_session_token(token)) -@micropsi_app.route("/nodenet/import", method="POST") +@micropsi_app.route("/agent/import", method="POST") def import_nodenet(): user_id, p, t = get_request_data() data = request.files['file_upload'].file.read() data = data.decode('utf-8') nodenet_uid = runtime.import_nodenet(data, owner=user_id) - return dict(status='success', msg="Nodenet imported", nodenet_uid=nodenet_uid) + return dict(status='success', msg="Agent imported", nodenet_uid=nodenet_uid) -@micropsi_app.route("/nodenet/merge/") +@micropsi_app.route("/agent/merge/") def merge_nodenet_form(nodenet_uid): token = request.get_cookie("token") - return template("upload.tpl", title='Merge Nodenet', message='Select a file to upload and use for merging', - action='/nodenet/merge/%s' % nodenet_uid, + return template("upload.tpl", title='Merge Agent', message='Select a file to 
upload and use for merging', + action='/agent/merge/%s' % nodenet_uid, version=VERSION, userid=usermanager.get_user_id_for_session_token(token), permissions=usermanager.get_permissions_for_session_token(token)) -@micropsi_app.route("/nodenet/merge/", method="POST") +@micropsi_app.route("/agent/merge/", method="POST") def merge_nodenet(nodenet_uid): data = request.files['file_upload'].file.read() data = data.decode('utf-8') runtime.merge_nodenet(nodenet_uid, data) - return dict(status='success', msg="Nodenet merged") + return dict(status='success', msg="Agent merged") -@micropsi_app.route("/nodenet/export/") +@micropsi_app.route("/agent/export/") def export_nodenet(nodenet_uid): response.set_header('Content-type', 'application/json') - response.set_header('Content-Disposition', 'attachment; filename="nodenet.json"') + response.set_header('Content-Disposition', 'attachment; filename="agent.json"') return runtime.export_nodenet(nodenet_uid) -@micropsi_app.route("/nodenet/edit") +@micropsi_app.route("/recorder/export/-") +def export_recorder(nodenet_uid, recorder_uid): + data = runtime.export_recorders(nodenet_uid, [recorder_uid]) + recorder = runtime.get_recorder(nodenet_uid, recorder_uid) + response.set_header('Content-type', 'application/octet-stream') + response.set_header('Content-Disposition', 'attachment; filename="recorder_%s.npz"' % recorder.name) + return data + + +@micropsi_app.route("/recorder/export/", method="POST") +def export_recorders(nodenet_uid): + uids = [] + for param in request.params.allitems(): + if param[0] == 'recorder_uids[]': + uids.append(param[1]) + data = runtime.export_recorders(nodenet_uid, uids) + response.set_header('Content-type', 'application/octet-stream') + response.set_header('Content-Disposition', 'attachment; filename="recorders_%s.npz"' % nodenet_uid) + return data + + +@micropsi_app.route("/agent/edit") def edit_nodenet(): user_id, permissions, token = get_request_data() - # nodenet_id = request.params.get('id', None) - title 
= 'Edit Nodenet' if id is not None else 'New Nodenet' - - theano_available = True - try: - import theano - except ImportError: - theano_available = False + nodenet_uid = request.params.get('id') + title = 'Edit Agent' if nodenet_uid is not None else 'New Agent' return template("nodenet_form.tpl", title=title, # nodenet_uid=nodenet_uid, nodenets=runtime.get_available_nodenets(), + worldtypes=runtime.get_available_world_types(), templates=runtime.get_available_nodenets(), worlds=runtime.get_available_worlds(), - version=VERSION, user_id=user_id, permissions=permissions, theano_available=theano_available) + version=VERSION, user_id=user_id, permissions=permissions) -@micropsi_app.route("/nodenet/edit", method="POST") +@micropsi_app.route("/agent/edit", method="POST") def write_nodenet(): user_id, permissions, token = get_request_data() params = dict((key, request.forms.getunicode(key)) for key in request.forms) + worldadapter_name = params['nn_worldadapter'] + wa_params = {} + for key in params: + if key.startswith('worldadapter_%s_' % worldadapter_name): + strip = len("worldadapter_%s_" % worldadapter_name) + wa_params[key[strip:]] = params[key] if "manage nodenets" in permissions: - result, nodenet_uid = runtime.new_nodenet(params['nn_name'], engine=params['nn_engine'], worldadapter=params['nn_worldadapter'], template=params.get('nn_template'), owner=user_id, world_uid=params.get('nn_world'), use_modulators=params.get('nn_modulators', False)) + result, nodenet_uid = runtime.new_nodenet( + params['nn_name'], + engine=params['nn_engine'], + worldadapter=params['nn_worldadapter'], + template=params.get('nn_template'), + owner=user_id, + world_uid=params.get('nn_world'), + use_modulators=params.get('nn_modulators', False), + worldadapter_config=wa_params) if result: - return dict(status="success", msg="Nodenet created", nodenet_uid=nodenet_uid) + return dict(status="success", msg="Agent created", nodenet_uid=nodenet_uid) else: - return dict(status="error", msg="Error 
saving nodenet: %s" % nodenet_uid) - return dict(status="error", msg="Insufficient rights to write nodenet") + return dict(status="error", msg="Error saving agent: %s" % nodenet_uid) + return dict(status="error", msg="Insufficient rights to write agent") -@micropsi_app.route("/world/import") +@micropsi_app.route("/environment/import") def import_world_form(): token = request.get_cookie("token") - return template("upload.tpl", title='World import', message='Select a file to upload and use for importing', - action='/world/import', + return template("upload.tpl", title='Environment import', message='Select a file to upload and use for importing', + action='/environment/import', version=VERSION, user_id=usermanager.get_user_id_for_session_token(token), permissions=usermanager.get_permissions_for_session_token(token)) -@micropsi_app.route("/world/import", method="POST") +@micropsi_app.route("/environment/import", method="POST") def import_world(): user_id, p, t = get_request_data() data = request.files['file_upload'].file.read() data = data.decode('utf-8') world_uid = runtime.import_world(data, owner=user_id) - return dict(status='success', msg="World imported", world_uid=world_uid) + return dict(status='success', msg="Environment imported", world_uid=world_uid) -@micropsi_app.route("/world/export/") +@micropsi_app.route("/environment/export/") def export_world(world_uid): response.set_header('Content-type', 'application/json') - response.set_header('Content-Disposition', 'attachment; filename="world.json"') + response.set_header('Content-Disposition', 'attachment; filename="environment.json"') return runtime.export_world(world_uid) -@micropsi_app.route("/world/edit") +@micropsi_app.route("/environment/edit") def edit_world_form(): token = request.get_cookie("token") - id = request.params.get('id', None) - title = 'Edit World' if id is not None else 'New World' + world_uid = request.params.get('id', None) + world = None + if world_uid: + world = 
runtime.worlds.get(world_uid) + title = 'Edit Environment' if world is not None else 'New Environment' worldtypes = runtime.get_available_world_types() return template("world_form.tpl", title=title, worldtypes=worldtypes, + world=world, version=VERSION, user_id=usermanager.get_user_id_for_session_token(token), permissions=usermanager.get_permissions_for_session_token(token)) -@micropsi_app.route("/world/edit", method="POST") +@micropsi_app.route("/environment/edit", method="POST") def edit_world(): params = dict((key, request.forms.getunicode(key)) for key in request.forms) - type = params['world_type'] + world_uid = params.get('world_uid') + if world_uid: + world_type = runtime.worlds[world_uid].__class__.__name__ + else: + world_type = params['world_type'] config = {} for p in params: - if p.startswith(type + '_'): - config[p[len(type) + 1:]] = params[p] + if p.startswith(world_type + '_'): + config[p[len(world_type) + 1:]] = params[p] user_id, permissions, token = get_request_data() if "manage worlds" in permissions: - result, uid = runtime.new_world(params['world_name'], params['world_type'], user_id, config=config) - if result: - return dict(status="success", msg="World created", world_uid=uid) + if world_uid: + runtime.set_world_properties(world_uid, world_name=params['world_name'], config=config) + return dict(status="success", msg="Environment changes saved") else: - return dict(status="error", msg=": %s" % result) - return dict(status="error", msg="Insufficient rights to create world") + result, uid = runtime.new_world(params['world_name'], world_type, user_id, config=config) + if result: + return dict(status="success", msg="Environment created", world_uid=uid) + else: + return dict(status="error", msg=": %s" % result) + return dict(status="error", msg="Insufficient rights to create environment") -@micropsi_app.route("/nodenet_list/") -@micropsi_app.route("/nodenet_list/") +@micropsi_app.route("/agent_list/") +@micropsi_app.route("/agent_list/") def 
nodenet_list(current_nodenet=None): user_id, permissions, token = get_request_data() nodenets = runtime.get_available_nodenets() - return template("nodenet_list", type="nodenet", user_id=user_id, + return template("nodenet_list", type="agent", user_id=user_id, current=current_nodenet, mine=dict((uid, nodenets[uid]) for uid in nodenets if nodenets[uid].owner == user_id), others=dict((uid, nodenets[uid]) for uid in nodenets if nodenets[uid].owner != user_id)) -@micropsi_app.route("/world_list/") -@micropsi_app.route("/world_list/") +@micropsi_app.route("/environment_list/") +@micropsi_app.route("/environment_list/") def world_list(current_world=None): user_id, permissions, token = get_request_data() worlds = runtime.get_available_worlds() - return template("nodenet_list", type="world", user_id=user_id, + return template("nodenet_list", type="environment", user_id=user_id, current=current_world, mine=dict((uid, worlds[uid]) for uid in worlds if worlds[uid].owner == user_id), others=dict((uid, worlds[uid]) for uid in worlds if worlds[uid].owner != user_id)) @@ -672,27 +792,19 @@ def world_list(current_world=None): def edit_runner_properties(): user_id, permissions, token = get_request_data() if len(request.params) > 0: - runtime.set_runner_properties(int(request.params['timestep']), int(request.params['factor'])) + runtime.set_runner_properties(int(request.params['timestep']), bool(request.params.get('infguard'))) return dict(status="success", msg="Settings saved") else: return template("runner_form", action="/config/runner", value=runtime.get_runner_properties()) -@micropsi_app.route("/create_new_nodenet_form") -def create_new_nodenet_form(): - user_id, permissions, token = get_request_data() - nodenets = runtime.get_available_nodenets() - worlds = runtime.get_available_worlds() - return template("nodenet_form", user_id=user_id, template="None", - nodenets=nodenets, worlds=worlds) - - @micropsi_app.route("/create_worldadapter_selector/") def 
create_worldadapter_selector(world_uid): - nodenets = runtime.get_available_nodenets() - worlds = runtime.get_available_worlds() - return template("worldadapter_selector", world_uid=world_uid, - nodenets=nodenets, worlds=worlds) + return template("worldadapter_selector", + world_uid=world_uid, + nodenets=runtime.get_available_nodenets(), + worlds=runtime.get_available_worlds(), + worldtypes=runtime.get_available_world_types()) @micropsi_app.route("/dashboard") @@ -716,16 +828,20 @@ def show_dashboard(): @rpc("get_nodenet_metadata") def get_nodenet_metadata(nodenet_uid, nodespace='Root', include_links=True): - return True, runtime.get_nodenet_metadata(nodenet_uid) + """ Return metadata for the given nodenet_uid """ + return runtime.get_nodenet_metadata(nodenet_uid) @rpc("get_nodes") -def get_nodes(nodenet_uid, nodespaces=[], include_links=True): - return True, runtime.get_nodes(nodenet_uid, nodespaces, include_links) +def get_nodes(nodenet_uid, nodespaces=[], include_links=True, links_to_nodespaces=[]): + """ Return content of the given nodenet, filtered by nodespaces. 
+ Optionally also returns links to and from the nodespaces listed in `links_to_nodespaces` """ + return True, runtime.get_nodes(nodenet_uid, nodespaces, include_links, links_to_nodespaces=links_to_nodespaces) @rpc("new_nodenet") -def new_nodenet(name, owner=None, engine='dict_engine', template=None, worldadapter=None, world_uid=None, use_modulators=None): +def new_nodenet(name, owner=None, engine='dict_engine', template=None, worldadapter=None, world_uid=None, use_modulators=None, worldadapter_config={}): + """ Create a new nodenet with the given configuration """ if owner is None: owner, _, _ = get_request_data() return runtime.new_nodenet( @@ -735,27 +851,44 @@ def new_nodenet(name, owner=None, engine='dict_engine', template=None, worldadap template=template, owner=owner, world_uid=world_uid, - use_modulators=use_modulators) + use_modulators=use_modulators, + worldadapter_config=worldadapter_config) @rpc("get_calculation_state") -def get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=None, monitors=None, dashboard=None): - return runtime.get_calculation_state(nodenet_uid, nodenet=nodenet, nodenet_diff=nodenet_diff, world=world, monitors=monitors, dashboard=dashboard) +def get_calculation_state(nodenet_uid, nodenet=None, nodenet_diff=None, world=None, monitors=None, dashboard=None, recorders=None): + """ Return the current simulation state for any of the following: the given nodenet, world, monitors, dashboard, recorders + Return values depend on the parameters: + if you provide the nodenet-parameter (a dict, with all-optional keys: nodespaces, include_links, links_to_nodespaces) you will get the contents of the nodenet + if you provide the nodenet_diff-parameter (a dict, with key "step" (the step to which the diff is calculated, and optional nodespaces) you will get a diff of the nodenet + if you provide the world-parameter (anything) you will get the state of the nodenet's environment + if you provide the monitor-parameter (anything), you 
will get data of all monitors registered in the nodenet + if you provide the dashboard-parameter (anything) you will get a dict of dashboard data + if you provide the recorder-parameter (anything), you will get data of all recorders registered in the nodenet + """ + return runtime.get_calculation_state(nodenet_uid, nodenet=nodenet, nodenet_diff=nodenet_diff, world=world, monitors=monitors, dashboard=dashboard, recorders=recorders) @rpc("get_nodenet_changes") def get_nodenet_changes(nodenet_uid, nodespaces=[], since_step=0): - return runtime.get_nodenet_changes(nodenet_uid, nodespaces=nodespaces, since_step=since_step) + """ Return a diff of the nodenets state between the given since_step and the current state. optionally filtered by nodespaces""" + data = runtime.get_nodenet_activation_data(nodenet_uid, nodespaces=nodespaces, last_call_step=since_step) + if data['has_changes']: + data['changes'] = runtime.get_nodespace_changes(nodenet_uid, nodespaces=nodespaces, since_step=since_step) + else: + data['changes'] = {} + return True, data @rpc("generate_uid") def generate_uid(): + """ Return a unique identifier""" return True, tools.generate_uid() @rpc("create_auth_token") def create_auth_token(user, password, remember=True): - # log in new user + """ Create a session for the user, and returns a token for identification""" token = usermanager.start_session(user, password, remember) if token: return True, token @@ -765,14 +898,17 @@ def create_auth_token(user, password, remember=True): else: return False, "User unknown" + @rpc("invalidate_auth_token") def invalidate_auth_token(token): + """ Terminate the session of the user associated with this token""" usermanager.end_session(token) return True @rpc("get_available_nodenets") def get_available_nodenets(user_id=None): + """ Return a dict of available nodenets, optionally filtered by owner""" if user_id and user_id not in usermanager.users: return False, 'User not found' return True, 
runtime.get_available_nodenets(owner=user_id) @@ -780,16 +916,19 @@ def get_available_nodenets(user_id=None): @rpc("delete_nodenet", permission_required="manage nodenets") def delete_nodenet(nodenet_uid): + """ Delete the given nodenet """ return runtime.delete_nodenet(nodenet_uid) @rpc("set_nodenet_properties", permission_required="manage nodenets") -def set_nodenet_properties(nodenet_uid, nodenet_name=None, worldadapter=None, world_uid=None, owner=None): - return runtime.set_nodenet_properties(nodenet_uid, nodenet_name=nodenet_name, worldadapter=worldadapter, world_uid=world_uid, owner=owner) +def set_nodenet_properties(nodenet_uid, nodenet_name=None, worldadapter=None, world_uid=None, owner=None, worldadapter_config={}): + """ Set the nodenet's properties. """ + return runtime.set_nodenet_properties(nodenet_uid, nodenet_name=nodenet_name, worldadapter=worldadapter, world_uid=world_uid, owner=owner, worldadapter_config=worldadapter_config) @rpc("set_node_state") def set_node_state(nodenet_uid, node_uid, state): + """ Set a state-value of the given node """ if state == "": state = None return runtime.set_node_state(nodenet_uid, node_uid, state) @@ -797,16 +936,22 @@ def set_node_state(nodenet_uid, node_uid, state): @rpc("set_node_activation") def set_node_activation(nodenet_uid, node_uid, activation): + """ Set the node's activation (aka the activation of the first gate) """ return runtime.set_node_activation(nodenet_uid, node_uid, activation) @rpc("start_calculation", permission_required="manage nodenets") def start_calculation(nodenet_uid): + """ Start the runner of the given nodenet """ return runtime.start_nodenetrunner(nodenet_uid) @rpc("set_runner_condition", permission_required="manage nodenets") def set_runner_condition(nodenet_uid, steps=-1, monitor=None): + """ Register a stop-condition for the nodenet-runner, depending on the parameter: + steps (int): Stop runner after having calculated this many steps + monitor (dict, containing "uid", and "value"): 
Stop if the monitor with the given uid has the given value + """ if monitor and 'value' in monitor: monitor['value'] = float(monitor['value']) if steps: @@ -818,62 +963,81 @@ def set_runner_condition(nodenet_uid, steps=-1, monitor=None): @rpc("remove_runner_condition", permission_required="manage nodenets") def remove_runner_condition(nodenet_uid): + """ Remove a configured stop-condition""" return runtime.remove_runner_condition(nodenet_uid) @rpc("set_runner_properties", permission_required="manage server") -def set_runner_properties(timestep, factor): - return runtime.set_runner_properties(timestep, factor) +def set_runner_properties(timestep, infguard): + """ Configure the server-settings: + timestep: milliseconds per nodenet-step""" + return runtime.set_runner_properties(timestep, infguard) @rpc("get_runner_properties") def get_runner_properties(): + """ Return the server-settings, returning timestep in a dict""" return True, runtime.get_runner_properties() @rpc("get_is_calculation_running") def get_is_calculation_running(nodenet_uid): + """ Return True if the calculation of the given nodenet is currently running """ return True, runtime.get_is_nodenet_running(nodenet_uid) @rpc("stop_calculation", permission_required="manage nodenets") def stop_calculation(nodenet_uid): + """ Stop the given nodenet's calculation""" return runtime.stop_nodenetrunner(nodenet_uid) @rpc("step_calculation", permission_required="manage nodenets") def step_calculation(nodenet_uid): + """ Manually advance the calculation of the given nodenet by 1 step""" return True, runtime.step_nodenet(nodenet_uid) @rpc("revert_calculation", permission_required="manage nodenets") def revert_calculation(nodenet_uid): + """ Revert the state of the nodenet and its world to the persisted one""" return runtime.revert_nodenet(nodenet_uid, True) @rpc("revert_nodenet", permission_required="manage nodenets") def revert_nodenet(nodenet_uid): + """ Revert the state of the nodenet to the persisted one"""
return runtime.revert_nodenet(nodenet_uid) +@rpc("reload_and_revert", permission_required="manage nodenets") +def reload_and_revert(nodenet_uid): + """ reload code, and revert calculation""" + return runtime.reload_and_revert(nodenet_uid) + + @rpc("save_nodenet", permission_required="manage nodenets") def save_nodenet(nodenet_uid): + """ Persist the current state of the nodenet""" return runtime.save_nodenet(nodenet_uid) @rpc("export_nodenet") def export_nodenet_rpc(nodenet_uid): + """ Return a json dump of the nodenet""" return True, runtime.export_nodenet(nodenet_uid) @rpc("import_nodenet", permission_required="manage nodenets") def import_nodenet_rpc(nodenet_data): + """ Import a json dump of a whole nodenet""" user_id, _, _ = get_request_data() return True, runtime.import_nodenet(nodenet_data, user_id) @rpc("merge_nodenet", permission_required="manage nodenets") def merge_nodenet_rpc(nodenet_uid, nodenet_data): + """ Merge a json dump into the given nodenet""" return runtime.merge_nodenet(nodenet_uid, nodenet_data) @@ -881,19 +1045,32 @@ def merge_nodenet_rpc(nodenet_uid, nodenet_data): @rpc("step_nodenets_in_world") def step_nodenets_in_world(world_uid, nodenet_uid=None, steps=1): + """ Advance all nodenets registered in the given world + (or, only the given nodenet) by the given number of steps""" return runtime.step_nodenets_in_world(world_uid, nodenet_uid=nodenet_uid, steps=steps) @rpc("get_available_worlds") def get_available_worlds(user_id=None): + """ Return a dict of available worlds, optionally filtered by owner""" data = {} for uid, world in runtime.get_available_worlds(user_id).items(): - data[uid] = {'name': world.name} # fixme + data[uid] = dict( + uid=world.uid, + name=world.name, + world_type=world.world_type, + filename=world.filename, + config={}, + owner=world.owner) # fixme + # FIXME: document what remains broken here — config is only populated below if the world defines one
+ if hasattr(world, 'config'): + data[uid]['config'] = world.config return True, data @rpc("get_world_properties") def get_world_properties(world_uid): + """ Return a bunch of properties for the given world (name, type, config, agents, ...)""" try: return True, runtime.get_world_properties(world_uid) except KeyError: @@ -902,14 +1079,14 @@ def get_world_properties(world_uid): @rpc("get_worldadapters") def get_worldadapters(world_uid, nodenet_uid=None): - try: - return True, runtime.get_worldadapters(world_uid, nodenet_uid=nodenet_uid) - except KeyError: - return False, 'World %s not found' % world_uid + """ Return the world adapters available in the given world. Provide an optional nodenet_uid of an agent + in the given world to obtain datasources and datatargets for the agent's worldadapter """ + return True, runtime.get_worldadapters(world_uid, nodenet_uid=nodenet_uid) @rpc("get_world_objects") def get_world_objects(world_uid, type=None): + """ Returns a dict of worldobjects present in the world, optionally filtered by type """ try: return True, runtime.get_world_objects(world_uid, type) except KeyError: @@ -917,80 +1094,97 @@ def get_world_objects(world_uid, type=None): @rpc("add_worldobject") -def add_worldobject(world_uid, type, position, orientation=0.0, name="", parameters=None, uid=None): - return runtime.add_worldobject(world_uid, type, position, orientation=orientation, name=name, parameters=parameters, uid=uid) +def add_worldobject(world_uid, type, position, orientation=0.0, name="", parameters=None): + """ Add a worldobject of the given type """ + return runtime.add_worldobject(world_uid, type, position, orientation=orientation, name=name, parameters=parameters) @rpc("delete_worldobject") def delete_worldobject(world_uid, object_uid): + """ Delete the given worldobject """ return runtime.delete_worldobject(world_uid, object_uid) @rpc("set_worldobject_properties") def set_worldobject_properties(world_uid, uid, position=None, orientation=None, name=None, 
parameters=None): + """ Set the properties of a worldobject in the given world """ if runtime.set_worldobject_properties(world_uid, uid, position, int(orientation), name, parameters): return dict(status="success") else: - return dict(status="error", msg="unknown world or world object") + return dict(status="error", msg="unknown environment or world object") @rpc("set_worldagent_properties") def set_worldagent_properties(world_uid, uid, position=None, orientation=None, name=None, parameters=None): + """ Set the properties of an agent in the given world """ if runtime.set_worldagent_properties(world_uid, uid, position, orientation, name, parameters): return dict(status="success") else: - return dict(status="error", msg="unknown world or world object") + return dict(status="error", msg="unknown environment or world object") @rpc("new_world", permission_required="manage worlds") -def new_world(world_name, world_type, owner=None): +def new_world(world_name, world_type, owner=None, config={}): + """ Create a new world with the given name, of the given type """ if owner is None: owner, _, _ = get_request_data() - return runtime.new_world(world_name, world_type, owner) + return runtime.new_world(world_name, world_type, owner=owner, config=config) @rpc("get_available_world_types") def get_available_world_types(): - return True, sorted(runtime.get_available_world_types().keys()) + """ Return a dict with world_types as keys and their configuration-dicts as value """ + data = runtime.get_available_world_types() + for key in data: + del data[key]['class'] # remove class reference for json + return True, data @rpc("delete_world", permission_required="manage worlds") def delete_world(world_uid): + """ Delete the given world """ return runtime.delete_world(world_uid) @rpc("get_world_view") def get_world_view(world_uid, step): + """ Return a dict containing current_step, agents, objects""" return True, runtime.get_world_view(world_uid, step) @rpc("set_world_properties",
permission_required="manage worlds") -def set_world_properties(world_uid, world_name=None, owner=None): - return runtime.set_world_properties(world_uid, world_name, owner) +def set_world_properties(world_uid, world_name=None, owner=None, config=None): + """ Set the properties of the given world """ + return runtime.set_world_properties(world_uid, world_name, owner, config) @rpc("set_world_data") def set_world_data(world_uid, data): + """ Set user-data for the given world. Format and content depends on the world's implementation""" return runtime.set_world_data(world_uid, data) @rpc("revert_world", permission_required="manage worlds") def revert_world(world_uid): + """ Revert the world to the persisted state """ return runtime.revert_world(world_uid) @rpc("save_world", permission_required="manage worlds") def save_world(world_uid): + """ Persist the current world state""" return runtime.save_world(world_uid) @rpc("export_world") def export_world_rpc(world_uid): + """ Return a complete json dump of the world's state""" return True, runtime.export_world(world_uid) @rpc("import_world", permission_required="manage worlds") def import_world_rpc(worlddata): + """ Import a new world from the provided json dump""" user_id, _, _ = get_request_data() return True, runtime.import_world(worlddata, user_id) @@ -998,277 +1192,378 @@ def import_world_rpc(worlddata): # Monitor @rpc("add_gate_monitor") -def add_gate_monitor(nodenet_uid, node_uid, gate, sheaf=None, name=None, color=None): - return True, runtime.add_gate_monitor(nodenet_uid, node_uid, gate, sheaf=sheaf, name=name, color=color) +def add_gate_monitor(nodenet_uid, node_uid, gate, name=None, color=None): + """ Add a gate monitor to the given node, recording outgoing activation""" + return True, runtime.add_gate_monitor(nodenet_uid, node_uid, gate, name=name, color=color) @rpc("add_slot_monitor") -def add_slot_monitor(nodenet_uid, node_uid, slot, sheaf=None, name=None, color=None): - return True, 
runtime.add_slot_monitor(nodenet_uid, node_uid, slot, sheaf=sheaf, name=name, color=color) +def add_slot_monitor(nodenet_uid, node_uid, slot, name=None, color=None): + """ Add a slot monitor to the given node, recording incoming activation""" + return True, runtime.add_slot_monitor(nodenet_uid, node_uid, slot, name=name, color=color) @rpc("add_link_monitor") -def add_link_monitor(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, property, name, color=None): - return True, runtime.add_link_monitor(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, property, name, color=color) +def add_link_monitor(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, name, color=None): + """ Add a link monitor to the given link, recording the link's weight""" + return True, runtime.add_link_monitor(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, name, color=color) @rpc("add_modulator_monitor") def add_modulator_monitor(nodenet_uid, modulator, name, color=None): + """ Add a modulator monitor, recording the value of the emotional modulator""" return True, runtime.add_modulator_monitor(nodenet_uid, modulator, name, color=color) @rpc("add_custom_monitor") def add_custom_monitor(nodenet_uid, function, name, color=None): + """ Add a custom monitor - provide the python code as string in function.""" return True, runtime.add_custom_monitor(nodenet_uid, function, name, color=color) +@rpc("add_group_monitor") +def add_group_monitor(nodenet_uid, nodespace, name, node_name_prefix='', node_uids=[], gate='gen', color=None): + """ Add a group monitor recording the activations of the group """ + return True, runtime.add_group_monitor(nodenet_uid, nodespace, name, node_name_prefix=node_name_prefix, node_uids=node_uids, gate=gate, color=color) + + @rpc("remove_monitor") def remove_monitor(nodenet_uid, monitor_uid): + """ Delete the given monitor""" try: runtime.remove_monitor(nodenet_uid, monitor_uid) return 
dict(status='success') except KeyError: - return dict(status='error', msg='unknown nodenet or monitor') + return dict(status='error', msg='unknown agent or monitor') @rpc("clear_monitor") def clear_monitor(nodenet_uid, monitor_uid): + """ Clear the monitor's history """ try: runtime.clear_monitor(nodenet_uid, monitor_uid) return dict(status='success') except KeyError: - return dict(status='error', msg='unknown nodenet or monitor') - - -@rpc("export_monitor_data") -def export_monitor_data(nodenet_uid, monitor_uid=None): - return True, runtime.export_monitor_data(nodenet_uid, monitor_uid) + return dict(status='error', msg='unknown agent or monitor') @rpc("get_monitor_data") -def get_monitor_data(nodenet_uid, step, monitor_from=0, monitor_count=-1): - return True, runtime.get_monitor_data(nodenet_uid, step, monitor_from, monitor_count) +def get_monitor_data(nodenet_uid, step=0, monitor_from=0, monitor_count=-1): + """ Return data for monitors in this nodenet """ + return True, runtime.get_monitor_data(nodenet_uid, step, from_step=monitor_from, count=monitor_count) # Nodenet @rpc("get_nodespace_list") def get_nodespace_list(nodenet_uid): - """ returns a list of nodespaces in the given nodenet.""" + """ Return a list of nodespaces in the given nodenet.""" return True, runtime.get_nodespace_list(nodenet_uid) @rpc("get_nodespace_activations") def get_nodespace_activations(nodenet_uid, nodespaces, last_call_step=-1): + """ Return a dict of uids to lists of activation values""" return True, runtime.get_nodenet_activation_data(nodenet_uid, nodespaces, last_call_step) -@rpc("get_nodespace_changes") -def get_nodespace_changes(nodenet_uid, nodespaces, since_step): - return runtime.get_nodespace_changes(nodenet_uid, nodespaces, since_step) - - @rpc("get_nodespace_properties") def get_nodespace_properties(nodenet_uid, nodespace_uid=None): + """ Return a dict of properties of the nodespace""" return True, runtime.get_nodespace_properties(nodenet_uid, nodespace_uid) 
@rpc("set_nodespace_properties") def set_nodespace_properties(nodenet_uid, nodespace_uid, properties): + """ Set a dict of properties of the nodespace""" return True, runtime.set_nodespace_properties(nodenet_uid, nodespace_uid, properties) @rpc("get_node") def get_node(nodenet_uid, node_uid): + """ Return the complete json data for this node""" return runtime.get_node(nodenet_uid, node_uid) @rpc("add_node", permission_required="manage nodenets") def add_node(nodenet_uid, type, position, nodespace, state=None, name="", parameters={}): + """ Create a new node""" return runtime.add_node(nodenet_uid, type, position, nodespace, state=state, name=name, parameters=parameters) @rpc("add_nodespace", permission_required="manage nodenets") -def add_nodespace(nodenet_uid, position, nodespace, name="", options=None): - return runtime.add_nodespace(nodenet_uid, position, nodespace, name=name, options=options) +def add_nodespace(nodenet_uid, nodespace, name="", options=None): + """ Create a new nodespace""" + return runtime.add_nodespace(nodenet_uid, nodespace, name=name, options=options) @rpc("clone_nodes", permission_required="manage nodenets") def clone_nodes(nodenet_uid, node_uids, clone_mode="all", nodespace=None, offset=[50, 50]): + """ Clone a bunch of nodes. The nodes will get new unique node ids, + a "copy" suffix to their name, and a slight positional offset. + To specify whether the links should be copied too, you can give the following clone-modes: + * "all" to clone all links + * "internal" to only clone links within the clone set of nodes + * "none" to not clone links at all. + + Per default, a clone of a node will appear in the same nodespace, slightly below the original node. 
+ If you however specify a nodespace, all clones will be copied to the given nodespace.""" return runtime.clone_nodes(nodenet_uid, node_uids, clone_mode, nodespace=nodespace, offset=offset) -@rpc("set_entity_positions", permission_required="manage nodenets") -def set_entity_positions(nodenet_uid, positions): - return runtime.set_entity_positions(nodenet_uid, positions) +@rpc("set_node_positions", permission_required="manage nodenets") +def set_node_positions(nodenet_uid, positions): + """ Set the positions of the nodes. Expects a dict node_uid to new position""" + return runtime.set_node_positions(nodenet_uid, positions) @rpc("set_node_name", permission_required="manage nodenets") def set_node_name(nodenet_uid, node_uid, name): + """ Set the name of the given node""" return runtime.set_node_name(nodenet_uid, node_uid, name) @rpc("delete_nodes", permission_required="manage nodenets") def delete_nodes(nodenet_uid, node_uids): + """ Delete the given nodes. Expects a list of uids""" return runtime.delete_nodes(nodenet_uid, node_uids) @rpc("delete_nodespace", permission_required="manage nodenets") def delete_nodespace(nodenet_uid, nodespace): + """ Delete the given nodespace and all its contents""" return runtime.delete_nodespace(nodenet_uid, nodespace) @rpc("align_nodes", permission_required="manage nodenets") def align_nodes(nodenet_uid, nodespace): + """ Automatically align the nodes in the given nodespace """ return runtime.align_nodes(nodenet_uid, nodespace) @rpc("generate_netapi_fragment", permission_required="manage nodenets") def generate_netapi_fragment(nodenet_uid, node_uids): + """ Return Python code that can recreate the selected nodes and their states""" return True, runtime.generate_netapi_fragment(nodenet_uid, node_uids) @rpc("get_available_node_types") def get_available_node_types(nodenet_uid): + """ Return a dict of available built-in node types and native module types""" return True, runtime.get_available_node_types(nodenet_uid) 
@rpc("get_available_native_module_types") def get_available_native_module_types(nodenet_uid): + """ Return a dict of available native module types""" return True, runtime.get_available_native_module_types(nodenet_uid) @rpc("set_node_parameters", permission_required="manage nodenets") def set_node_parameters(nodenet_uid, node_uid, parameters): + """ Set the parameters of this node""" return runtime.set_node_parameters(nodenet_uid, node_uid, parameters) -@rpc("get_gatefunction") -def get_gatefunction(nodenet_uid, node_uid, gate_type): - return True, runtime.get_gatefunction(nodenet_uid, node_uid, gate_type) - - -@rpc("set_gatefunction", permission_required="manage nodenets") -def set_gatefunction(nodenet_uid, node_uid, gate_type, gatefunction=None): - return runtime.set_gatefunction(nodenet_uid, node_uid, gate_type, gatefunction=gatefunction) +@rpc("set_gate_configuration", permission_required="manage nodenets") +def set_gate_configuration(nodenet_uid, node_uid, gate_type, gatefunction=None, gatefunction_parameters=None): + """ Set the gatefunction and its parameters for the given node""" + for key in list(gatefunction_parameters.keys()): + try: + gatefunction_parameters[key] = float(gatefunction_parameters[key]) + except ValueError: + del gatefunction_parameters[key] + return runtime.set_gate_configuration(nodenet_uid, node_uid, gate_type, gatefunction, gatefunction_parameters) @rpc("get_available_gatefunctions") def get_available_gatefunctions(nodenet_uid): + """ Return a dict of possible gatefunctions and their parameters""" return True, runtime.get_available_gatefunctions(nodenet_uid) -@rpc("set_gate_parameters", permission_required="manage nodenets") -def set_gate_parameters(nodenet_uid, node_uid, gate_type, parameters): - return runtime.set_gate_parameters(nodenet_uid, node_uid, gate_type, parameters) - - @rpc("get_available_datasources") def get_available_datasources(nodenet_uid): + """ Return an ordered list of available datasources """ return True, 
runtime.get_available_datasources(nodenet_uid) @rpc("get_available_datatargets") def get_available_datatargets(nodenet_uid): + """ Return an ordered list of available datatargets """ return True, runtime.get_available_datatargets(nodenet_uid) @rpc("bind_datasource_to_sensor", permission_required="manage nodenets") def bind_datasource_to_sensor(nodenet_uid, sensor_uid, datasource): + """ Assign the given sensor to the given datasource """ return runtime.bind_datasource_to_sensor(nodenet_uid, sensor_uid, datasource) -@rpc("bind_datatarget_to_actor", permission_required="manage nodenets") -def bind_datatarget_to_actor(nodenet_uid, actor_uid, datatarget): - return runtime.bind_datatarget_to_actor(nodenet_uid, actor_uid, datatarget) +@rpc("bind_datatarget_to_actuator", permission_required="manage nodenets") +def bind_datatarget_to_actuator(nodenet_uid, actuator_uid, datatarget): + """ Assign the given actuator to the given datatarget""" + return runtime.bind_datatarget_to_actuator(nodenet_uid, actuator_uid, datatarget) @rpc("add_link", permission_required="manage nodenets") def add_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight=1): + """ Create a link between the given nodes """ return runtime.add_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight=weight) @rpc("set_link_weight", permission_required="manage nodenets") -def set_link_weight(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight, certainty=1): - return runtime.set_link_weight(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight, certainty) +def set_link_weight(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight): + """ Set the weight of an existing link between the given nodes """ + return runtime.set_link_weight(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type, weight) @rpc("get_links_for_nodes") def get_links_for_nodes(nodenet_uid, node_uids=[]): + """ 
Return a dict, containing + "links": List of links starting or ending at one of the given nodes + "nodes": a dict of nodes that are connected by these links, but reside in other nodespaces + """ return True, runtime.get_links_for_nodes(nodenet_uid, node_uids) @rpc("delete_link", permission_required="manage nodenets") def delete_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type): + """ Delete the given link""" return runtime.delete_link(nodenet_uid, source_node_uid, gate_type, target_node_uid, slot_type) -@rpc("reload_native_modules", permission_required="manage nodenets") -def reload_native_modules(): - return runtime.reload_native_modules() +@rpc("reload_code", permission_required="manage nodenets") +def reload_code(): + """ Reload the contents of the code-folder """ + return runtime.reload_code() @rpc("user_prompt_response") -def user_prompt_response(nodenet_uid, node_uid, values, resume_nodenet): - runtime.user_prompt_response(nodenet_uid, node_uid, values, resume_nodenet) +def user_prompt_response(nodenet_uid, node_uid, key, parameters, resume_nodenet): + """ Respond to a user-prompt issued by a node. 
""" + runtime.user_prompt_response(nodenet_uid, node_uid, key, parameters, resume_nodenet) return True # Face @rpc("get_emoexpression_parameters") def get_emoexpression_parameters(nodenet_uid): + """ Return a dict of parameters to visualize the emotional state of the agent """ nodenet = runtime.get_nodenet(nodenet_uid) return True, emoexpression.calc_emoexpression_parameters(nodenet) + +# --------- recorder -------- + + +@rpc("add_gate_activation_recorder") +def add_gate_activation_recorder(nodenet_uid, group_definition, name, interval=1): + """ Add an activation recorder to a group of nodes.""" + return runtime.add_gate_activation_recorder(nodenet_uid, group_definition, name, interval) + + +@rpc("add_node_activation_recorder") +def add_node_activation_recorder(nodenet_uid, group_definition, name, interval=1): + """ Add an activation recorder to a group of nodes.""" + return runtime.add_node_activation_recorder(nodenet_uid, group_definition, name, interval) + + +@rpc("add_linkweight_recorder") +def add_linkweight_recorder(nodenet_uid, from_group_definition, to_group_definition, name, interval=1): + """ Add a linkweight recorder to links between to groups.""" + return runtime.add_linkweight_recorder(nodenet_uid, from_group_definition, to_group_definition, name, interval) + + +@rpc("remove_recorder") +def remove_recorder(nodenet_uid, recorder_uid): + """ Delete a recorder.""" + return runtime.remove_recorder(nodenet_uid, recorder_uid) + + +@rpc("clear_recorder") +def clear_recorder(nodenet_uid, recorder_uid): + """ Clear the recorder's history """ + return runtime.clear_recorder(nodenet_uid, recorder_uid) + + +@rpc("get_recorders") +def get_recorders(nodenet_uid): + """ Return a dict of recorders""" + return runtime.get_recorder_data(nodenet_uid) + # --------- logging -------- +@rpc("get_logging_levels") +def get_logging_levels(): + """ Set the logging levels """ + return True, runtime.get_logging_levels() + + @rpc("set_logging_levels") def 
set_logging_levels(logging_levels): + """ Set the logging levels """ runtime.set_logging_levels(logging_levels) return True @rpc("get_logger_messages") def get_logger_messages(logger=[], after=0): + """ Get Logger messages for the given loggers, after the given timestamp """ return True, runtime.get_logger_messages(logger, after) @rpc("get_monitoring_info") -def get_monitoring_info(nodenet_uid, logger=[], after=0, monitor_from=0, monitor_count=-1): - data = runtime.get_monitoring_info(nodenet_uid, logger, after, monitor_from, monitor_count) +def get_monitoring_info(nodenet_uid, logger=[], after=0, monitor_from=0, monitor_count=-1, with_recorders=False): + """ Return monitor, logger, recorder data """ + data = runtime.get_monitoring_info(nodenet_uid, logger, after, monitor_from, monitor_count, with_recorders=with_recorders) return True, data +# --------- benchmark info -------- + +@rpc("benchmark_info") +def benchmark_info(): + """ Time some math operations to determine the speed of the underlying machine. 
""" + return True, runtime.benchmark_info() + + # --- user scripts --- @rpc("run_recipe") def run_recipe(nodenet_uid, name, parameters): + """ Run the recipe with the given name """ return runtime.run_recipe(nodenet_uid, name, parameters) @rpc('get_available_recipes') def get_available_recipes(): + """ Return a dict of available recipes """ return True, runtime.get_available_recipes() @rpc("run_operation") def run_operation(nodenet_uid, name, parameters, selection_uids): + """ Run an operation on the given selection of nodes """ return runtime.run_operation(nodenet_uid, name, parameters, selection_uids) @rpc('get_available_operations') def get_available_operations(): + """ Return a dict of available operations """ return True, runtime.get_available_operations() @rpc('get_agent_dashboard') def get_agent_dashboard(nodenet_uid): + """ Return a dict of data to display the agent's state in a dashboard """ return True, runtime.get_agent_dashboard(nodenet_uid) @rpc("run_netapi_command", permission_required="manage nodenets") def run_netapi_command(nodenet_uid, command): + """ Run a netapi command from the netapi console """ if INCLUDE_CONSOLE: return runtime.run_netapi_command(nodenet_uid, command) else: @@ -1277,18 +1572,45 @@ def run_netapi_command(nodenet_uid, command): @rpc("get_netapi_signatures") def get_netapi_autocomplete_data(nodenet_uid, name=None): + """ Return autocomplete-options for the netapi console. 
""" return True, runtime.get_netapi_autocomplete_data(nodenet_uid, name=None) +@rpc("flow") +def flow(nodenet_uid, source_uid, source_output, target_uid, target_input): + """ Create a connection between two flow_modules """ + return runtime.flow(nodenet_uid, source_uid, source_output, target_uid, target_input) + + +@rpc("unflow") +def unflow(nodenet_uid, source_uid, source_output, target_uid, target_input): + """ Remove the connection between the given flow_modules """ + return runtime.unflow(nodenet_uid, source_uid, source_output, target_uid, target_input) + + +@rpc("runtime_info") +def runtime_info(): + """ Return a dict of information about this runtime, like version and configuration""" + return True, runtime.runtime_info() + # ----------------------------------------------------------------------------------------------- + def main(host=None, port=None): host = host or cfg['micropsi2']['host'] port = port or cfg['micropsi2']['port'] - server = cfg['micropsi2']['server'] print("Starting App on Port " + str(port)) runtime.initialize() - run(micropsi_app, host=host, port=port, quiet=True, server=server) + try: + from cherrypy import wsgiserver + server = 'cherrypy' + kwargs = {'numthreads': 30} + except ImportError: + server = 'wsgiref' + kwargs = {} + + run(micropsi_app, host=host, port=port, quiet=False, server=server, **kwargs) + if __name__ == "__main__": parser = argparse.ArgumentParser(description="Start the %s server." 
% APPTITLE) diff --git a/micropsi_server/minidoc.py b/micropsi_server/minidoc.py index 601d5c6d..d6ed3e15 100644 --- a/micropsi_server/minidoc.py +++ b/micropsi_server/minidoc.py @@ -19,9 +19,29 @@ PROJECT_ROOT = os.path.join(os.path.dirname(__file__), "..") PREFIX = "minidoc/" FILETYPES = [".py"] -EXCLUDED_DIRS = ["..", "test", "tests", "__pycache__"] +EXCLUDED_DIRS = ["..", "test", "tests", "__pycache__", "bin", "lib", "include", "htmlcov", "cherrypy", "src", "share"] EXCLUDED_FILES = ["__init__.py"] EXCLUDE_HIDDEN = True +API_SORT = ["json_rpc_api", "netapi", "theano_netapi", "node_api"] +API_FILES = { + "netapi": { + "name": "NetAPI", + "file": "micropsi_core/nodenet/netapi.py", + }, + "theano_netapi": { + "name": "Theano NetAPI", + "file": "micropsi_core/nodenet/theano_engine/theano_netapi.py", + }, + "node_api": { + "name": "Node API", + "file": "micropsi_core/nodenet/node.py", + }, + "json_rpc_api": { + "name": "JSON RPC API", + "file": "micropsi_server/micropsi_app.py" + }, +} + ERROR = """

No documentation found at the given path

""" @@ -48,6 +68,14 @@ def get_navigation(): else: return ERROR + +def get_api_navigation(): + result = "" + for key in API_SORT: + result += """%s
""" % (key, API_FILES[key]['name']) + return result + + def get_documentation_body(path=""): """ Create documentation as simple HTML, using the supplied path, which is interpreted as the project root. @@ -89,6 +117,22 @@ def document(filepath): return ERROR +def get_api_doc(key=None): + """ + Create documentation of selected API files + Methods without docstrings will be omitted. + """ + if key is None: + return "" + + elif key in API_FILES: + file = API_FILES[key]['file'] + realpath = os.path.join(os.path.dirname(__file__), '..', file) + return _get_file_content(realpath, ignore_undocumented=True) + + return ERROR + + def _get_dir_content(realpath): """Helper function to turn a directory into HTML""" @@ -114,6 +158,14 @@ def _get_dir_list(realpath, indent = " "*4): result = "" for pathname, dirnames, filenames in os.walk(realpath): + # prune recurse directories + for x in EXCLUDED_DIRS: + if x in dirnames: + dirnames.remove(x) + if EXCLUDE_HIDDEN: + for x in dirnames.copy(): + if x.startswith('.'): + dirnames.remove(x) dir = os.path.basename(pathname) if dir not in EXCLUDED_DIRS and not (EXCLUDE_HIDDEN and dir.startswith(".")): url = _convert_path_to_url(pathname) @@ -128,7 +180,7 @@ def _get_dir_list(realpath, indent = " "*4): result += '%s%s
\n' % (indent * url.count("/"), PREFIX, url, filename) return result -def _get_file_content(realpath): +def _get_file_content(realpath, ignore_undocumented=False): """Helper function to turn a file into HTML""" result = """

Module: %s

@@ -151,9 +203,10 @@ def _get_file_content(realpath): parsed_code = visitor.get_doc() entries = [ parsed_code[key] for key in sorted(parsed_code.keys())] for entry in entries: - begin, end = entry.get("lines") - result += '
' + "".join(code[begin:end]).rstrip().rstrip(":") +"
" - result += _convert_str_to_html(entry.get("description"))+"
" + if ignore_undocumented is False or entry.get("description"): + begin, end = entry.get("lines") + result += '
' + "".join(code[begin:end]).rstrip().rstrip(":") +"
" + result += _convert_str_to_html(entry.get("description"))+"
" return result diff --git a/micropsi_server/static/css/micropsi-styles.css b/micropsi_server/static/css/micropsi-styles.css index e2b5c4c0..0fdc23c8 100644 --- a/micropsi_server/static/css/micropsi-styles.css +++ b/micropsi_server/static/css/micropsi-styles.css @@ -22,6 +22,7 @@ h3 { .monitor_list input {display:inline;} .monitor_list label {display:inline;} .monitor_list li {padding: 2px 0;} +#monitor input[type="checkbox"] {margin: 0;} #graph, #logs { overflow: auto; @@ -36,10 +37,13 @@ h3 { .log_DEBUG {color: #1700FF;} .log_INFO {color: #4b4b4b;} .log_WARNING{color: #942628;} -.log_ERROR {color: #e12628;} +.log_ERROR, .log_CRITICAL, .log_ERROR:hover, .log_CRITICAL:hover { + background-color: #c22; + color: white; +} -.delete_monitor i { opacity: 0.5; } -.delete_monitor:hover i { opacity: 1; } +.monitor_action i { opacity: 0.5; } +.monitor_action:hover i { opacity: 1; } .logentry{ display:block; @@ -632,6 +636,10 @@ p.clear { font-size: 0.9em; } +table th { + text-align: left; +} + #console_input { border-top: 0px none; border-left: 0px none; diff --git a/micropsi_server/static/img/2001.gif b/micropsi_server/static/img/2001.gif deleted file mode 100644 index 8ed30d6a..00000000 Binary files a/micropsi_server/static/img/2001.gif and /dev/null differ diff --git a/micropsi_server/static/img/brainstorm.gif b/micropsi_server/static/img/brainstorm.gif deleted file mode 100644 index 28882b3b..00000000 Binary files a/micropsi_server/static/img/brainstorm.gif and /dev/null differ diff --git a/micropsi_server/static/img/brazil.gif b/micropsi_server/static/img/brazil.gif deleted file mode 100644 index 40aecb97..00000000 Binary files a/micropsi_server/static/img/brazil.gif and /dev/null differ diff --git a/micropsi_server/static/img/brian.gif b/micropsi_server/static/img/brian.gif deleted file mode 100644 index 877669a4..00000000 Binary files a/micropsi_server/static/img/brian.gif and /dev/null differ diff --git a/micropsi_server/static/img/fahrenheit451.gif 
b/micropsi_server/static/img/fahrenheit451.gif deleted file mode 100644 index 81760d85..00000000 Binary files a/micropsi_server/static/img/fahrenheit451.gif and /dev/null differ diff --git a/micropsi_server/static/img/frankenstein.gif b/micropsi_server/static/img/frankenstein.gif deleted file mode 100644 index c14660f1..00000000 Binary files a/micropsi_server/static/img/frankenstein.gif and /dev/null differ diff --git a/micropsi_server/static/img/metropolis.gif b/micropsi_server/static/img/metropolis.gif deleted file mode 100644 index d0d84423..00000000 Binary files a/micropsi_server/static/img/metropolis.gif and /dev/null differ diff --git a/micropsi_server/static/img/strangelove.gif b/micropsi_server/static/img/strangelove.gif deleted file mode 100644 index 503500b7..00000000 Binary files a/micropsi_server/static/img/strangelove.gif and /dev/null differ diff --git a/micropsi_server/static/img/young_frankenstein.gif b/micropsi_server/static/img/young_frankenstein.gif deleted file mode 100644 index 35e4a991..00000000 Binary files a/micropsi_server/static/img/young_frankenstein.gif and /dev/null differ diff --git a/micropsi_server/static/island/Micropsi.png b/micropsi_server/static/island/Micropsi.png deleted file mode 100644 index e774a826..00000000 Binary files a/micropsi_server/static/island/Micropsi.png and /dev/null differ diff --git a/micropsi_server/static/island/background.jpg b/micropsi_server/static/island/background.jpg deleted file mode 100644 index 890229ce..00000000 Binary files a/micropsi_server/static/island/background.jpg and /dev/null differ diff --git a/micropsi_server/static/island/boletus-edulis.png b/micropsi_server/static/island/boletus-edulis.png deleted file mode 100644 index e937b5d7..00000000 Binary files a/micropsi_server/static/island/boletus-edulis.png and /dev/null differ diff --git a/micropsi_server/static/island/boulder.png b/micropsi_server/static/island/boulder.png deleted file mode 100644 index 79c60c6a..00000000 Binary files 
a/micropsi_server/static/island/boulder.png and /dev/null differ diff --git a/micropsi_server/static/island/braintree.png b/micropsi_server/static/island/braintree.png deleted file mode 100644 index f7371c27..00000000 Binary files a/micropsi_server/static/island/braintree.png and /dev/null differ diff --git a/micropsi_server/static/island/braitenberg.png b/micropsi_server/static/island/braitenberg.png deleted file mode 100644 index c3737adf..00000000 Binary files a/micropsi_server/static/island/braitenberg.png and /dev/null differ diff --git a/micropsi_server/static/island/cubensis.png b/micropsi_server/static/island/cubensis.png deleted file mode 100644 index 6d9115b6..00000000 Binary files a/micropsi_server/static/island/cubensis.png and /dev/null differ diff --git a/micropsi_server/static/island/fly-agaris.png b/micropsi_server/static/island/fly-agaris.png deleted file mode 100644 index 316d0aaf..00000000 Binary files a/micropsi_server/static/island/fly-agaris.png and /dev/null differ diff --git a/micropsi_server/static/island/island.js b/micropsi_server/static/island/island.js deleted file mode 100644 index dd58c498..00000000 --- a/micropsi_server/static/island/island.js +++ /dev/null @@ -1,732 +0,0 @@ - /* - * viewer for the world. 
- */ - -var canvas = $('#world'); - -var viewProperties = { - frameWidth: 1445, - zoomFactor: 1, - objectWidth: 12, - lineHeight: 15, - objectLabelColor: new Color ("#94c2f5"), - objectForegroundColor: new Color ("#000000"), - fontSize: 10, - symbolSize: 14, - highlightColor: new Color ("#ffffff"), - gateShadowColor: new Color("#888888"), - shadowColor: new Color ("#000000"), - shadowStrokeWidth: 0, - shadowDisplacement: new Point(0.5,1.5), - selectionColor: new Color("#99ccff"), - innerShadowDisplacement: new Point(0.2,0.7), - padding: 3, - typeColors: { - "other": new Color ("#94c2f5") - }, - label: { - x: 10, - y: -10 - } -}; - -available_object_types = []; - -var scale_factors = { - 'Lightsource': 1, - 'Braitenberg': 1, - 'Survivor': 1, - 'PalmTree': 0.5, - 'Maple': 0.7, - 'Braintree': 0.5, - 'Wirselkraut': 0.2, - 'Thornbush': 1, - 'Juniper': 0.4, - 'Champignon': 0.125, - 'FlyAgaric': 0.2, - 'Stone': 0.2, - 'Boulder': 0.6, - 'Menhir': 0.4, - 'Waterhole': 0.4 -} - -objects = {}; -symbols = {}; -agents = {}; - -objectLayer = new Layer(); -objectLayer.name = 'ObjectLayer'; - -currentWorldSimulationStep = -1; - -var world_data = null; - -var worldscope = paper; - -if(typeof currentWorld != 'undefined'){ - setCurrentWorld(currentWorld); -} else { - currentWorld = $.cookie('selected_world') || null; -} - -scenes = {}; - -addObjectMode = null; -addObjectGhost = null; - -var agentsList = $('#world_agents_list table'); - -function get_world_data(){ - return {step: currentWorldSimulationStep}; -} - -function set_world_data(data){ - - worldscope.activate(); - currentWorldSimulationStep = data.current_step; - $('#world_step').val(currentWorldSimulationStep); - $('#world_status').val(data.status_message); - // treat agents and objects the same - data.objects = jQuery.extend(data.objects, data.agents); - for(var key in objects){ - if(!(key in data.objects)){ - if(objects[key].representation){ - objects[key].representation.remove(); - delete objects[key]; - if(key in scenes){ 
- delete scenes[key]; - } - } - } else { - if(data.objects[key].position && data.objects[key].position.length == 2){ - if(!(path && path.objectMoved && path.name == key)){ - objects[key].x = data.objects[key].position[0]; - objects[key].y = data.objects[key].position[1]; - objects[key].representation.position = new Point(objects[key].x, objects[key].y); - } - if(data.objects[key].orientation){ - objects[key].representation.rotate(data.objects[key].orientation - objects[key].orientation); - } - objects[key].orientation = data.objects[key].orientation; - if(key in scenes){ - scenes[key] = data.objects[key].scene; - } - } else { - console.log('obj has no pos: ' + key); - } - } - delete data.objects[key]; - } - for(key in data.objects){ - if(data.objects[key].position && data.objects[key].position.length == 2){ - if(key in data.agents){ - addAgent(new WorldObject(key, data.objects[key].position[0], data.objects[key].position[1], data.objects[key].orientation, data.objects[key].name, data.objects[key].type)); - agents[key] = objects[key]; - if('scene' in data.agents[key]){ - scenes[data.agents[key].uid] = data.agents[key].scene; - } - } else { - addObject(new WorldObject(key, data.objects[key].position[0], data.objects[key].position[1], data.objects[key].orientation, data.objects[key].name, data.objects[key].type)); - } - } else { - console.log('obj has no pos ' + key); - } - } - // purge agent list - for(key in agents){ - if(!(key in data.agents)){ - $("#world_agents_list a[data='" + key + "']").parent().parent().remove(); - } - } - - updateSceneViewer(); - updateViewSize(); -} - -register_stepping_function('world', get_world_data, set_world_data); - -refreshWorldView = function(){ - api.call('get_world_view', - {world_uid: currentWorld, step: currentWorldSimulationStep}, - success = set_world_data, - error=function(data){ - $.cookie('selected_world', '', {expires:-1, path:'/'}); - dialogs.notification(data.Error, 'error'); - } - ) -}; - -function updateSceneViewer(){ 
- var selector = $('#scene_viewer_agent'); - var selected = selector.val(); - var selector_html = ''; - for(var key in scenes){ - selector_html += ''; - } - if(selector_html != ''){ - selector_html = '' + selector_html; - $('.scene_viewer_section').addClass('form-default').show(); - } else{ - $('.scene_viewer_section').removeClass('form-default').hide(); - } - selector.html(selector_html); - var keys = Object.keys(scenes); - if(!selected && keys.length == 1){ - selected = keys[0]; - selector.val(selected); - } else { - selector.val(selected); - } - if(selected){ - refreshSceneView(); - } -} - -function refreshSceneView(event){ - var selector = $('#scene_viewer_agent'); - var scene = scenes[selector.val()]; - var viewer = $('#scene_viewer'); - var html = ''; - var grid_factor = {}; - if(scene){ - grid_factor['y'] = scene.shape_grid.length - 1; - grid_factor['x'] = scene.shape_grid[0].length - 1; - for(var row in scene.shape_grid){ - for(var col in scene.shape_grid[row]){ - var classnames = []; - if((scene.fovea_x + (grid_factor.x/2) == col) && - Math.abs(scene.fovea_y - (grid_factor.y/2)) == row){ - classnames.push('active'); - } - if(scene.shape_grid[row][col]){ - for(var prop in scene.shape_grid[row][col]){ - classnames.push(scene.shape_grid[row][col][prop]); - } - html += ''; - } else { - html += ' '; - } - } - html += '
'; - } - } - viewer.html(html); -} - -function setCurrentWorld(uid){ - currentWorld = uid; - $.cookie('selected_world', currentWorld, {expires:7, path:'/'}); - loadWorldInfo(); -} - -function loadWorldInfo(){ - api.call('get_world_properties', { - world_uid: currentWorld - }, success=function(data){ - available_object_types = data.available_worldobjects.sort(); - initializeControls(); - refreshWorldView(); - world_data = data; - worldRunning = data.is_active; - currentWorldSimulationStep = data.current_step; - if('assets' in data){ - var iconhtml = ''; - for(var key in data.assets.icons){ - iconhtml += ' '; - } - $('#world_objects_icons').html(iconhtml); - if(data.assets.x && data.assets.y){ - view.viewSize = new Size(data.assets.x, data.assets.y); - } - canvas.css('background', 'url("/static/'+ data.assets.background + '") no-repeat top left'); - } - }, error=function(data){ - $.cookie('selected_world', '', {expires:-1, path:'/'}); - dialogs.notification(data.Error, 'error'); - }); -} - - -function updateViewSize() { - view.draw(true); -} - - -function WorldObject(uid, x, y, orientation, name, type, parameters){ - this.uid = uid; - this.x = x; - this.y = y; - this.orientation = orientation || 0; - this.name = name || ""; - this.type = type || ""; - this.parameters = parameters; -} - -function addObject(worldobject){ - if(! (worldobject.uid in objects)) { - renderObject(worldobject); - objects[worldobject.uid] = worldobject; - } else { - redrawObject(objects[worldobject.uid]); - } - return worldobject; -} - -function addAgent(worldobject){ - if(! 
(worldobject.uid in objects)) { - renderObject(worldobject); - objects[worldobject.uid] = worldobject; - } else { - redrawObject(objects[worldobject.uid]); - } - objects[worldobject.uid] = worldobject; - agentsList.html(agentsList.html() + ''+worldobject.name+' ('+worldobject.type+')'); - return worldobject; -} - -function redrawObject(obj){ - if(objects[obj.uid].representation){ - objects[obj.uid].representation.remove(); - } - renderObject(obj); -} - -function renderObject(worldobject){ - if(!(worldobject.type in symbols)){ - var bounds = calculateObjectBounds(worldobject); - var path = createObjectShape(worldobject, bounds); - symbols[worldobject.type] = new Symbol(path); - //objectLayer.addChild(symbols[worldobject.type]); - } - worldobject.representation = symbols[worldobject.type].place(); - if(worldobject.orientation){ - worldobject.representation.rotate(worldobject.orientation); - } - worldobject.representation.position = new Point(worldobject.x, worldobject.y); - worldobject.representation.name = worldobject.uid; - objectLayer.addChild(worldobject.representation); -} - -function createObjectShape(worldobject, bounds){ - var raster = new Raster(getObjectIcon(worldobject)); - if(worldobject.type in scale_factors){ - raster.scale(scale_factors[worldobject.type]); - } - raster.position = new Point(bounds.x + raster.width/2, bounds.y+bounds.height/2); - return raster; -} - -function getObjectIcon(worldobject){ - switch(worldobject.type){ - case "Lightsource": - case "Braitenberg": - case "Survivor": - case "PalmTree": - case "Maple": - case "Braintree": - case "Wirselkraut": - case "Thornbush": - case "Juniper": - case "Champignon": - case "FlyAgaric": - case "Stone": - case "Boulder": - case "Menhir": - case "Waterhole": - return 'icon_'+worldobject.type; - default: - if(worldobject.uid && worldobject.uid in agents){ - return 'icon_default_agent'; - } else { - return 'icon_default_object'; - } - } -} - -function calculateObjectBounds(worldobject){ - var size = 
viewProperties.objectWidth * viewProperties.zoomFactor; - return { - x: worldobject.x*viewProperties.zoomFactor - size/2, - y: worldobject.y*viewProperties.zoomFactor - size/2, - width: size, - height: size - }; -} - -function getLegend(worldobject){ - var legend = new Group(); - legend.name = 'objectLegend'; - var bounds = worldobject.representation.bounds; - var height = (viewProperties.fontSize*viewProperties.zoomFactor + 2*viewProperties.padding); - var point = new Point( - bounds.x + (viewProperties.label.x * viewProperties.zoomFactor), - Math.max(height, bounds.y + (viewProperties.label.y * viewProperties.zoomFactor))); - var text = new PointText(point); - text.justification = 'left'; - var content = ''; - if(worldobject.uid in agents){ - content = (worldobject.name) ? worldobject.name : worldobject.uid; - } else { - content = worldobject.type; - } - content += ' ('+parseInt(worldobject.x)+'/'+parseInt(worldobject.y)+')'; - text.content = content; - text.characterStyle = { - fillColor: 'black', - fontSize: viewProperties.fontSize*viewProperties.zoomFactor - }; - if(point.x + text.bounds.width + 2*viewProperties.padding > view.viewSize.width){ - point = new Point( - view.viewSize.width - (text.bounds.width + 3*viewProperties.padding), - point.y); - text.point = point; - } - var container = new Path.Rectangle(new Point(point.x - viewProperties.padding, point.y + viewProperties.padding), new Size(text.bounds.width + 2*viewProperties.padding, -height)); - container.fillColor = 'white'; - legend.addChild(container); - legend.addChild(text); - return legend; -} - -// -------------------------- mouse/ key listener --------------------------------------------// - -hoverUid = false; -label = false; - -movePath = false; -path = null; - -clickLabel = false; -clickHighlight = false; -clickPosition = null; - -selected = null; - -$('body').mousedown(function(event){ - if(addObjectMode && event.target != canvas[0] && event.target != $('#set_worldobject_sprinkle_mode')[0]){ 
- unsetAddObjectMode(); - } -}); - -function setAddObjectMode(objecttype){ - addObjectMode = objecttype; - addObjectGhost = new Raster(getObjectIcon({type:addObjectMode})); - addObjectGhost.scale(scale_factors[addObjectMode] / 2); - addObject.position = new Point(-100, -100); - objectLayer.addChild(addObjectGhost); - $('#set_worldobject_sprinkle_mode').text("Done").addClass('active'); -} - -function unsetAddObjectMode(){ - addObjectMode = null; - addObjectGhost.remove(); - addObjectGhost = null; - $('#set_worldobject_sprinkle_mode').text("add objects").removeClass('active').blur(); -} - -function onKeyDown(event) { - if(!addObjectMode){ - if (event.key == "backspace" || event.key == "delete") { - if (event.event.target.tagName == "BODY") { - event.preventDefault(); // browser-back - if(selected){ - if(!(selected.uid in agents)){ - deleteWorldObject(selected); - unselectObject(); - selected = null; - } - } - } - } - } else if(event.key == 'escape'){ - unsetAddObjectMode(); - } -} - -function onMouseDown(event){ - clickPosition = null; - showDefaultForm(); - var p = event.point; - if(addObjectMode){ - if(event.event.button == 2){ - unsetAddObjectMode(); - return; - } else { - createWorldObject(addObjectMode, p); - return; - } - } - var hit = false; - for (var uid in objects) { - if(objects[uid].representation && objects[uid].representation.hitTest(p)){ - selected = objects[uid]; - selectObject(objects[uid]); - hit = true; - break; - } - } - if(!hit){ - unselectObject(); - } -} - -function onMouseMove(event) { - var p = event.point; - if(event.event.target == canvas[0]){ - $('#world_status').val('Pos: ' + p.x + ' / ' + p.y); - } else { - $('#world_status').val('Pos: '); - } - - if(addObjectMode && addObjectGhost){ - addObjectGhost.position = p; - } - - // hovering - if (hoverUid) { // unhover - hoverUid = null; - } - // first, check for nodes - // we iterate over all bounding boxes, but should improve speed by maintaining an index - for (var uid in objects) { - 
if(objects[uid].representation){ - var bounds = objects[uid].representation.bounds; - if (bounds.contains(p)) { - if (hoverUid != uid){ - hoverUid = uid; - if (label){ - label.remove(); - } - if(clickHighlight){ - removeClickHighlight(); - } - highlightObject(hoverUid); - } - path = objectLayer.children[uid]; - movePath = true; - return; - } - } - } - if (!hoverUid && label){ - label.remove(); - label = null; - movePath = null; - } -} - -function onMouseDrag(event) { - var p = event.point; - if(event.event.target == canvas[0]){ - $('#world_status').val('Pos: ' + p.x + ' / ' + p.y); - } else { - $('#world_status').val('Pos: '); - } - if (movePath) { - path.objectMoved = true; - path.position += event.delta; - var obj = objects[path.name]; - obj.x += event.delta.x/viewProperties.zoomFactor; - obj.y += event.delta.y/viewProperties.zoomFactor; - obj.bounds = calculateObjectBounds(obj); - if(label){ - var height = (viewProperties.fontSize*viewProperties.zoomFactor + 2*viewProperties.padding); - label.position = new Point( - obj.bounds.x + (viewProperties.label.x * viewProperties.zoomFactor), - Math.max(height, obj.bounds.y + (viewProperties.label.y * viewProperties.zoomFactor))); - } - if(selectionBorder){ - selectionBorder.position += event.delta; - } - } -} - -function onMouseUp(event) { - var p = event.point; - if (movePath) { - if(path.objectMoved && objects[path.name]){ - // update position on server - path.objectMoved = false; - setObjectProperties(objects[path.name], objects[path.name].x, objects[path.name].y); - movePath = false; - updateViewSize(); - } - } -} - -selectionBorder = null; -function unselectObject(){ - if(selectionBorder){ - selectionBorder.remove(); - } -} -function selectObject(worldobject){ - if(selectionBorder){ - unselectObject(); - } - var bounds = worldobject.representation.bounds; - selectionBorder = new Path.Rectangle(worldobject.x - (bounds.width / 2), worldobject.y - (bounds.height /2), bounds.width , bounds.height ); - 
selectionBorder.strokeWidth = 1; - selectionBorder.name = 'selectionBorder'; - selectionBorder.strokeColor = viewProperties.selectionColor; - if(worldobject.orientation){ - selectionBorder.rotate(worldobject.orientation); - } - objectLayer.addChild(selectionBorder); - -} - -function highlightObject(uid){ - label = getLegend(objects[uid]); - objectLayer.addChild(label); - view.draw(true); -} - -function highlightAgent(uid){ - label = getLegend(agents[uid]); - objectLayer.addChild(label); - view.draw(true); -} - -function removeClickHighlight(){ - if(clickHighlight) { - objects[clickHighlight].representation.scale(1/viewProperties.hoverScale); - clickHighlight = null; - } - if(label){ - label.remove(); - label = null; - } -} - -function objectInViewport(obj) { - var parent = canvas.parent(); - var bounds = obj.representation.bounds; - return ( - bounds.y > parent.scrollTop() && - bounds.x > parent.scrollLeft() && - (bounds.y + bounds.height) < (parent.innerHeight() + parent.scrollTop() - 20) && - (bounds.x + bounds.width) < (parent.innerWidth() + parent.scrollLeft() - 20) - ); -} - -function scrollToObject(obj){ - var parent = canvas.parent(); - var bounds = obj.representation.bounds; - if(bounds.y <= parent.scrollTop()) parent.scrollTop(bounds.y - 50); - else if(bounds.y + bounds.height >= (parent.innerHeight() + parent.scrollTop())) parent.scrollTop(bounds.y - parent.innerHeight() + bounds.height + 50); - if(bounds.x <= parent.scrollLeft()) parent.scrollLeft(bounds.x - 50); - else if (bounds.x + bounds.width >= (parent.innerWidth() + parent.scrollLeft())) parent.scrollLeft(bounds.x - parent.innerWidth() + bounds.width + 50); -} - - -// --------------------------- controls -------------------------------------------------------- // - -function initializeControls(){ - $('.editor_field form .controls button[type="reset"]').on('click', showDefaultForm); - - $('#available_worldobjects').html(''); - agentsList.on('click', function(event){ - event.preventDefault(); - var 
target = $(event.target); - if(target.attr('class') == 'world_agent' && target.attr('data')){ - highlightAgent(target.attr('data')); - scrollToObject(agents[target.attr('data')]); - } - }); - $('#scene_viewer_agent').on('change', refreshSceneView); - - $('#set_worldobject_sprinkle_mode').on('click', function(event){ - event.preventDefault(); - if(addObjectMode){ - unsetAddObjectMode(); - } else { - setAddObjectMode($('#available_worldobjects').val()); - } - }); -} - - - -// ------------------------ side bar form stuff --------------------------------------------- // - -function showDefaultForm(){ - $('#world_forms .form-horizontal').hide(); - $('#world_forms .form-default').show(); -} - -function showObjectForm(worldobject){ - if(worldobject && worldobject.uid in agents){ - return false; - } - if(!worldobject) worldobject = {}; - $('#world_forms .form-horizontal').hide(); - $('#wo_uid_input').val(worldobject.uid); - $('#wo_name_input').val(worldobject.name); - var param_table = $('#wo_parameter_list'); - var param_html = ''; - for(var key in worldobject.parameters){ - param_html += ""; - } - param_table.html(param_html); - $('#edit_worldobject').show(); -} - - -// ------------------------ API Communication --------------------------------------------------- // - -function createWorldObject(type, pos){ - api.call('add_worldobject', {world_uid: currentWorld, type: type, position: [pos.x, pos.y]}, function(result){ - addObject(new WorldObject(result, pos.x, pos.y, 0, '', type, {})); - updateViewSize(); - }); -} - -function deleteWorldObject(worldobject){ - objects[worldobject.uid].representation.remove(); - delete objects[worldobject.uid]; - api.call('delete_worldobject', {'world_uid': currentWorld, 'object_uid': worldobject.uid}, function(){ - dialogs.notification("worldobject deleted"); - }); -} - -function setObjectProperties(worldobject, x, y, name, orientation, parameters){ - if(worldobject.uid in agents){ - return setAgentProperties(worldobject, x, y, name, 
orientation, parameters); - } - if(x) worldobject.x = x; - if(y) worldobject.y = y; - if(name) worldobject.name = name; - if(orientation) worldobject.orientation = orientation; - if(parameters) worldobject.parameters = parameters; - data = { - world_uid: currentWorld, - uid: worldobject.uid, - position: [worldobject.x, worldobject.y], - name: worldobject.name, - orientation: worldobject.orientation, - parameters: worldobject.parameters || {} - }; - api.call('set_worldobject_properties', data, function(result){ - redrawObject(worldobject); - }, api.defaultErrorCallback); -} - -function setAgentProperties(worldobject, x, y, name, orientation, parameters){ - if(x) worldobject.x = x; - if(y) worldobject.y = y; - if(name) worldobject.name = name; - if(orientation) worldobject.orientation = orientation; - if(parameters) worldobject.parameters = parameters; - data = { - world_uid: currentWorld, - uid: worldobject.uid, - position: [worldobject.x, worldobject.y], - name: worldobject.name, - orientation: worldobject.orientation, - parameters: worldobject.parameters || {} - }; - api.call('set_worldagent_properties', data, function(result){ - redrawObject(worldobject); - }, api.defaultErrorCallback); -} diff --git a/micropsi_server/static/island/island.tpl b/micropsi_server/static/island/island.tpl deleted file mode 100644 index bb19a333..00000000 --- a/micropsi_server/static/island/island.tpl +++ /dev/null @@ -1,33 +0,0 @@ -
-
- -
- -
-
-

World Status

- -
-
-

Scene Viewer

-

- - -

-
-
-
-

Agents

-
-
-
-

World Objects

-
- - -
-
-
-
-
\ No newline at end of file diff --git a/micropsi_server/static/island/juniper-berries.png b/micropsi_server/static/island/juniper-berries.png deleted file mode 100644 index 32bfbde9..00000000 Binary files a/micropsi_server/static/island/juniper-berries.png and /dev/null differ diff --git a/micropsi_server/static/island/lamp.png b/micropsi_server/static/island/lamp.png deleted file mode 100644 index 39b4afc7..00000000 Binary files a/micropsi_server/static/island/lamp.png and /dev/null differ diff --git a/micropsi_server/static/island/maple.png b/micropsi_server/static/island/maple.png deleted file mode 100644 index 0cb11e9c..00000000 Binary files a/micropsi_server/static/island/maple.png and /dev/null differ diff --git a/micropsi_server/static/island/menhir.png b/micropsi_server/static/island/menhir.png deleted file mode 100644 index 84631ea4..00000000 Binary files a/micropsi_server/static/island/menhir.png and /dev/null differ diff --git a/micropsi_server/static/island/palm-tree.png b/micropsi_server/static/island/palm-tree.png deleted file mode 100644 index a1f261ff..00000000 Binary files a/micropsi_server/static/island/palm-tree.png and /dev/null differ diff --git a/micropsi_server/static/island/psi_1.png b/micropsi_server/static/island/psi_1.png deleted file mode 100644 index d5b5be56..00000000 Binary files a/micropsi_server/static/island/psi_1.png and /dev/null differ diff --git a/micropsi_server/static/island/rock.png b/micropsi_server/static/island/rock.png deleted file mode 100644 index db15b930..00000000 Binary files a/micropsi_server/static/island/rock.png and /dev/null differ diff --git a/micropsi_server/static/island/tree_small.png b/micropsi_server/static/island/tree_small.png deleted file mode 100644 index acaa865d..00000000 Binary files a/micropsi_server/static/island/tree_small.png and /dev/null differ diff --git a/micropsi_server/static/island/unknownbox.png b/micropsi_server/static/island/unknownbox.png deleted file mode 100644 index 
9bb9263d..00000000 Binary files a/micropsi_server/static/island/unknownbox.png and /dev/null differ diff --git a/micropsi_server/static/island/well.png b/micropsi_server/static/island/well.png deleted file mode 100644 index 2d2d2d20..00000000 Binary files a/micropsi_server/static/island/well.png and /dev/null differ diff --git a/micropsi_server/static/island/wirselkraut.png b/micropsi_server/static/island/wirselkraut.png deleted file mode 100644 index 89cd5c24..00000000 Binary files a/micropsi_server/static/island/wirselkraut.png and /dev/null differ diff --git a/micropsi_server/static/js/dialogs.js b/micropsi_server/static/js/dialogs.js index 576951f0..b82b83a5 100644 --- a/micropsi_server/static/js/dialogs.js +++ b/micropsi_server/static/js/dialogs.js @@ -197,7 +197,7 @@ var api = { } msg += ''; } else { - msg = data + msg = data.data } } if(!msg){ @@ -247,19 +247,19 @@ $(function() { event.preventDefault(); dialogs.remote_form_dialog($(event.target).attr('href'), function(data){ // refreshNodenetList(); -- TODO: does not work yet (due to paperscript missing proper js integration) - dialogs.notification('Nodenet created. ID: ' + data.nodenet_uid, 'success'); + dialogs.notification('Agent created. 
ID: ' + data.nodenet_uid, 'success'); $.cookie('selected_nodenet', data.nodenet_uid+"/", { expires: 7, path: '/' }); window.location.reload(); }); }); $('.navbar a.nodenet_delete').on('click', function(){ - dialogs.confirm("Do you really want to delete this nodenet?", function(){ + dialogs.confirm("Do you really want to delete this agent?", function(){ api.call('delete_nodenet', {nodenet_uid: currentNodenet}, function(data){ currentNodenet=null; // refreshNodenetList(); -- TODO: does not work yet (due to paperscript missing proper js integration) $.cookie('selected_nodenet', "", { expires: 7, path: '/' }); - dialogs.notification('Nodenet deleted'); + dialogs.notification('Agent deleted'); window.location.reload(); }); }); @@ -280,24 +280,22 @@ $(function() { event.preventDefault(); $('#loading').show(); api.call('revert_nodenet', {nodenet_uid: currentNodenet}, function(data){ - dialogs.notification("nodenet reverted"); + dialogs.notification("agent reverted"); //setCurrentNodenet(nodenet_uid); -- TODO: does not work yet (due to paperscript missing proper js integration) window.location.reload(); }); }); - $('.navbar a.reload_native_modules').on('click', function(event){ + $('.navbar a.reload_code').on('click', function(event){ event.preventDefault(); if($(event.target).hasClass("reload_revert")){ - api.call('reload_native_modules', {}, function(){ - api.call('revert_nodenet', {nodenet_uid: currentNodenet}, function(){ - window.location.reload(); - }); + api.call('reload_and_revert', {nodenet_uid: currentNodenet}, function(){ + window.location.reload(); }); return } $('#loading').show(); - api.call('reload_native_modules', {}, function(){ + api.call('reload_code', {}, function(){ dialogs.notification("reload successful"); window.location.reload(); }); @@ -320,24 +318,28 @@ $(function() { $('.navbar a.world_new').on('click', function(event){ event.preventDefault(); dialogs.remote_form_dialog($(event.target).attr('href'), function(data){ - dialogs.notification('World 
created. ID: ' + data.world_uid, 'success'); + dialogs.notification('Environment created. ID: ' + data.world_uid, 'success'); $(document).trigger('new_world_created', data); - var url = '/world_list/' + ($.cookie('selected_world') || ''); + var url = '/environment_list/' + ($.cookie('selected_world') || ''); $.get(url, {}, function(data){ $('#world_list').html(data); }); }); }); - $('.navbar a.world_edit').on('click', remote_form); + + $('.navbar a.world_edit').on('click', function(event){ + event.preventDefault(); + dialogs.remote_form_dialog($(event.target).attr("href") + "?id=" + currentWorld); + }); $('.navbar a.world_delete').on('click', function(event){ event.preventDefault(); - dialogs.confirm("Do you really want to delete this world?", function(){ + dialogs.confirm("Do you really want to delete this environment?", function(){ api.call('delete_world', {world_uid: currentWorld}, function(){ $.cookie('selected_world', '', {expires: -1, path: '/'}); - dialogs.notification("World deleted"); + dialogs.notification("Environment deleted"); window.location.reload(); } ); @@ -353,10 +355,10 @@ $(function() { event.preventDefault(); api.call('revert_world', {world_uid: currentWorld}, function(){ - dialogs.notification("World state reverted"); + dialogs.notification("Environment state reverted"); window.location.reload(); }, function(){ - dialogs.notification('Error reverting world', 'error'); + dialogs.notification('Error reverting environment', 'error'); window.location.reload(); } ); @@ -434,7 +436,6 @@ $(function() { params['gate_type'] = $('#monitor_link_sourcegate_type_input').val(); params['target_node_uid'] = $('#monitor_link_targetnode_uid_input').val(); params['slot_type'] = $('#monitor_link_targetslot_type_input').val(); - params['property'] = 'weight'; break; case 'modulator': func = 'add_modulator_monitor'; @@ -669,6 +670,9 @@ unregister_stepping_function = function(type){ busy = false; fetch_stepping_info = function(){ + if(!currentNodenet){ + return 
false; + } params = { nodenet_uid: currentNodenet }; @@ -711,6 +715,7 @@ fetch_stepping_info = function(){ } var end = new Date().getTime(); + calculationRunning = data.calculation_running; if(data.calculation_running && !busy){ if(runner_properties.timestep - (end - start) > 0){ window.setTimeout(fetch_stepping_info, runner_properties.timestep - (end - start)); @@ -728,26 +733,29 @@ fetch_stepping_info = function(){ if(data.data == 'No such nodenet'){ currentNodenet = null; $.cookie('selected_nodenet', '', { expires: -1, path: '/' }); + } else { + api.defaultErrorCallback(data, outcome, type); } }); $('#nodenet_user_prompt .btn-primary').on('click', function(event){ event.preventDefault(); var form = $('#nodenet_user_prompt form'); - values = {}; + parameters = {}; var startnet = false; var fields = form.serializeArray(); for(var idx in fields){ if(fields[idx].name == 'run_nodenet'){ startnet = true; } else { - values[fields[idx].name] = fields[idx].value; + parameters[fields[idx].name] = fields[idx].value; } } api.call('user_prompt_response', { nodenet_uid: currentNodenet, node_uid: $('#user_prompt_node_uid').val(), - values: values, + key: $('#user_prompt_key').val(), + parameters: parameters, resume_nodenet: startnet }, function(data){ $(document).trigger("runner_started"); @@ -766,9 +774,9 @@ $(document).on('nodenet_changed', function(event, new_uid){ $(document).on('form_submit', function(event, data){ if(data.url == '/config/runner'){ for(var i=0; i < data.values.length; i++){ - switch(data.values[i].name){ - case 'timestep': runner_properties.timestep = parseInt(data.values[i].value); break; - case 'factor': runner_properties.timestep = parseInt(data.values[i].value); break; + if (data.values[i].name == 'timestep'){ + runner_properties.timestep = parseInt(data.values[i].value); + break; } } } @@ -779,7 +787,7 @@ api.call('get_runner_properties', {}, function(data){ }); function refreshNodenetList(){ - $.get("/nodenet_list/"+(currentNodenet || ''), 
function(html){ + $.get("/agent_list/"+(currentNodenet || ''), function(html){ $.each($('.nodenet_list'), function(idx, item){ $(item).html(html); $('.nodenet_select', item).on('click', function(event){ @@ -823,7 +831,7 @@ function stepNodenet(event){ $(document).trigger('runner_stepped'); }); } else { - dialogs.notification('No nodenet selected', 'error'); + dialogs.notification('No agent selected', 'error'); } } @@ -835,7 +843,7 @@ function startNodenetrunner(event){ $(document).trigger('runner_started'); }); } else { - dialogs.notification('No nodenet selected', 'error'); + dialogs.notification('No agent selected', 'error'); } } function stopNodenetrunner(event){ @@ -860,7 +868,7 @@ function revertAll(event){ } ); } else { - dialogs.notification('No nodenet selected', 'error'); + dialogs.notification('No agent selected', 'error'); } } $(function() { @@ -989,33 +997,38 @@ window.addMonitor = function(type, param, val){ function promptUser(data){ var html = ''; - html += '

Nodenet interrupted by Node ' + (data.node.name || data.node.uid) +' with message:

'; + html += '

Agent interrupted by Node ' + (data.node.name || data.node.uid) +' with message:

'; html += "

" + data.msg +"

"; html += '
'; - if (data.options){ - for(var idx in data.options){ - var item = data.options[idx]; - html += '
'; - if(item.values && typeof item.values == 'object'){ - html += '
'; + for(var val in item.options){ + html += ''; } - html += '
'; - } else if(item.type && item.type == "textarea"){ - html += '
'; + html += ''; } else { - html += '
'; + html += '
'; + } + if (item.description){ + html += '
'+item.description+'
' } + html += ''; } } - html += '
'; - html += '
'; + if (nodenetRunning){ + html += '
'; + html += '
'; + } html += ''; + html += ''; html += '
'; $('#nodenet_user_prompt .modal-body').html(html); $('#nodenet_user_prompt').modal("show"); diff --git a/micropsi_server/static/js/monitor.js b/micropsi_server/static/js/monitor.js index 4700a2d9..fa991f4d 100644 --- a/micropsi_server/static/js/monitor.js +++ b/micropsi_server/static/js/monitor.js @@ -19,6 +19,7 @@ $(function(){ var cookieval = $.cookie('selected_nodenet'); if (cookieval && cookieval.indexOf('/')){ currentNodenet = cookieval.split('/')[0]; + $('form#export_recorders').attr('action', '/recorder/export/' + currentNodenet); } var capturedLoggers = { @@ -44,27 +45,132 @@ $(function(){ var showStepInLog = true; var logs_to_add = []; + var rec_modal = $('#recorder_modal'); + var rec_type_dd = $('#recorder_type_input'); + rec_type_dd.on('change', function(event){ + var type = rec_type_dd.val(); + $('.recorder_specific').hide(); + $('.'+type).show(); + }) + $('.add_recorder').on('click', function(event){ + event.preventDefault(); + api.call('get_nodespace_list', {nodenet_uid: currentNodenet}, function(data){ + var html = ''; + for(uid in data){ + html += ''; + } + $('.recorder_nodespace_dropdown').html(html); + rec_type_dd.trigger('change'); + rec_modal.modal('show'); + }); + }); + $('#export_recorders').on('submit', function(event){ + var something_selected = false; + $('input[type=checkbox]', this).each(function(idx, el){ + if (el.checked){something_selected = true;} + }) + if (!something_selected){ + dialogs.notification("No recorders selected"); + event.preventDefault(); + } + }) + $('.btn-primary', rec_modal).on('click', function(event){ + var params = { + nodenet_uid: currentNodenet, + interval: parseInt($('#recorder_interval').val()), + name: $('#recorder_name').val(), + }; + var type = $('#recorder_type_input').val(); + var method = null; + if(type == 'gate_activation_recorder'){ + method = 'add_gate_activation_recorder'; + params['group_definition'] = { + 'nodespace_uid': $('#recorder_nodespace_uid').val(), + 'gatetype': 
$('#recorder_gate').val(), + } + var ids = $('#recorder_node_uids').val(); + if(ids){ + ids = ids.split(',') + for(var i in ids){ + ids[i] = ids[i].trim(); + } + params.group_definition['node_uids'] = ids; + } else{ + params.group_definition['node_name_prefix'] = $('#recorder_node_name_prefix').val(); + } + } else if(type == 'node_activation_recorder'){ + method = 'add_node_activation_recorder'; + params['group_definition'] = { + 'nodespace_uid': $('#recorder_nodespace_uid').val(), + } + var ids = $('#recorder_node_uids').val(); + if(ids){ + ids = ids.split(',') + for(var i in ids){ + ids[i] = ids[i].trim(); + } + params.group_definition['node_uids'] = ids; + } else{ + params.group_definition['node_name_prefix'] = $('#recorder_node_name_prefix').val(); + } + } else if(type == 'linkweight_recorder') { + method = "add_linkweight_recorder"; + params['from_group_definition'] = { + 'nodespace_uid': $('#recorder_from_nodespace_uid').val(), + 'gatetype': $('#recorder_from_gate').val(), + } + var ids = $('#recorder_from_node_uids'); + if(ids.val()){ + params.from_group_definition['node_uids'] = ids.split(',') + } else{ + params.from_group_definition['node_name_prefix'] = $('#recorder_from_node_name_prefix').val(); + } + params['to_group_definition'] = { + 'nodespace_uid': $('#recorder_to_nodespace_uid').val(), + 'gatetype': $('#recorder_to_gate').val(), + } + var ids = $('#recorder_to_node_uids'); + if(ids.val()){ + params.to_group_definition['node_uids'] = ids.split(',') + } else{ + params.to_group_definition['node_name_prefix'] = $('#recorder_to_node_name_prefix').val(); + } + } + api.call(method, params, function(){ + rec_modal.modal('hide'); + api.defaultSuccessCallback(); + refreshRecorders(); + }); + }) + init(); if(!$('#nodenet_editor').length && currentNodenet){ refreshMonitors(); } + var splitviewclass = 'span6'; + if(theano_available){ + splitviewclass = 'span4' + } + + var count_sections = $('.layout_field').length; $('.layoutbtn').on('click', function(event){ 
event.preventDefault(); var target = $(event.target); if(!target.hasClass('active')){ var layout = target.attr('data'); if(layout == 'vertical'){ - $('.layout_field').addClass('span6'); + $('.layout_field').addClass(splitviewclass); } else if(layout == 'horizontal'){ - $('.layout_field').removeClass('span6'); + $('.layout_field').removeClass(splitviewclass); } refreshMonitors(); $('.layoutbtn').removeClass('active'); target.addClass('active'); } }) + $('.layoutbtn[data="vertical"]').trigger('click'); $('#monitor_x_axis').on('change', function(){ viewProperties.xvalues = parseInt($('#monitor_x_axis').val()); @@ -88,6 +194,7 @@ $(function(){ currentNodenet = newNodenet; init(); refreshMonitors(); + $('form#export_recorders').attr('action', '/recorder/export/' + currentNodenet); }); $(document).on('nodenet_loaded', function(data, newNodenet){ currentNodenet = newNodenet; @@ -111,6 +218,7 @@ $(function(){ } }); + var monitor_list_items = []; function init() { bindEvents(); @@ -130,7 +238,8 @@ $(function(){ var params = { logger: poll, after: last_logger_call, - monitor_count: viewProperties.xvalues + monitor_count: viewProperties.xvalues, + with_recorders: true } if(fixed_position){ params['monitor_from'] = Math.max(fixed_position - (viewProperties.xvalues / 2), 1); @@ -138,13 +247,77 @@ $(function(){ return params; } + function setRecorderData(data){ + var table = $('#recorder_table'); + var html = ''; + for(var uid in data){ + var rec = data[uid]; + html += ' '+rec.name +''; + html += ' '; + html += ' '; + html += ' '; + html += ' '; + html += '' + html += ' Type:'+rec.classname+''; + html += ' Entries:'+(rec.current_index + 1)+''; + html += ' Interval:'+rec.interval+''; + if(rec.group_config){ + html += ' Group:'+rec.group_config.group_name+''; + } + if(rec.from_group_config){ + html += ' Groups:From: '+rec.from_group_config.group_name+'
To: '+rec.to_group_config.group_name+''; + } + html += ' '; + + html += ''; + } + table.html(html); + $('button', table).on('click', recorderAction); + } + + function recorderAction(event){ + event.preventDefault(); + var btn = $(event.target); + var uid = btn.attr("data-uid"); + var method_name = null; + switch(btn.attr('data-action')){ + case 'export': + return window.location.replace('/recorder/export/'+currentNodenet+'-'+uid); + case 'clear': + method_name = 'clear_recorder'; break; + case 'delete': + method_name = 'remove_recorder'; break; + } + if(method_name){ + api.call(method_name, {nodenet_uid: currentNodenet, recorder_uid: uid}, function(data){ + api.defaultSuccessCallback(data); + refreshRecorders(); + }); + } + } + function setData(data){ currentSimulationStep = data.current_step; setMonitorData(data); setLoggingData(data); + if (data.recorders){ + setRecorderData(data.recorders); + } + } + + if($('#monitor').height() > 0){ + register_stepping_function('monitors', getPollParams, setData); } + $('#monitor').on('shown', function(){ + register_stepping_function('monitors', getPollParams, setData); + if(!calculationRunning){ + $(document).trigger('runner_stepped'); + } + }); + $('#monitor').on('hidden', function(){ + unregister_stepping_function('monitors'); + }); - register_stepping_function('monitors', getPollParams, setData); function refreshMonitors(newNodenet){ params = getPollParams(); @@ -154,10 +327,16 @@ $(function(){ } } + function refreshRecorders(){ + api.call('get_recorders', {'nodenet_uid': currentNodenet}, setRecorderData); + } + function setMonitorData(data){ - updateMonitorList(data.monitors); - nodenetMonitors = data.monitors; - drawGraph(nodenetMonitors); + if(data.monitors){ + updateMonitorList(data.monitors); + nodenetMonitors = data.monitors; + drawGraph(nodenetMonitors); + } } function setLoggingData(data){ @@ -247,13 +426,22 @@ $(function(){ var html = ''; var sorted = Object.values(monitors); sorted.sort(sortByName); + var keys = 
Object.keys(monitors); + var changed = $(keys).not(monitor_list_items).length != 0 || $(monitor_list_items).not(keys).length != 0; + if(!changed){ + return; + } + monitor_list_items = []; + var els = $('.monitor'); for(var i = 0; i < sorted.length; i++){ var mon = sorted[i]; html += '
  • -1){ html += ' checked="checked"'; } - html += ' />
  • '; + html += ' /> '; + html += ' '; + monitor_list_items.push(mon.uid); } list.html(html); $('.monitor_checkbox', list).on('change', updateMonitorSelection); @@ -329,6 +517,12 @@ $(function(){ y2max = Math.max(y2max, monitors[uid].values[step]); y2min = Math.min(y2min, monitors[uid].values[step]); } + } else if(monitors[uid].classname == 'GroupMonitor'){ + y1values.concat(monitors[uid].values[step]); + if (step >= xstart && step <= xmax) { + y1max = Math.max(y1max, Math.max.apply(Math, monitors[uid].values[step])); + y1min = Math.min(y1min, Math.min.apply(Math, monitors[uid].values[step])); + } } else { y1values.push(monitors[uid].values[step]); if (step >= xstart && step <= xmax) { @@ -438,6 +632,39 @@ $(function(){ return ((position && d[0] == position) ? 4 : 2); }); + } else if(monitors[uid].classname == 'GroupMonitor'){ + for(var i = 0; i < monitors[uid].values[step].length; i++){ + var line = d3.svg.line() + .x(function(d) { + return x(d[0]); + }) + .y(function(d) { + return y1(d[1]); + }) + .defined(function(d){ + return d[1] == 0 || Boolean(d[1]) + }); + for (var step in monitors[uid].values) { + step = parseInt(step, 10); + if(step >= xstart && step <= xmax){ + if(monitors[uid].values[step]){ + data.push([step, parseFloat(monitors[uid].values[step][i])]); + } else { + data.push([step, null]); + } + } + } + var points = svg.selectAll(".point") + .data(data) + .enter().append("svg:circle") + .filter(function(d, i){ return d[1] == 0 || Boolean(d[1]) }) + .attr("fill", function(d, i) { return monitors[uid].color }) + .attr("cx", function(d, i) { return x(d[0]); }) + .attr("cy", function(d, i) { return y1(d[1]); }) + .attr("r", function(d) { + return ((position && d[0] == position) ? 
4 : 2); + }); + } } else { var line = d3.svg.line() .x(function(d) { @@ -476,5 +703,4 @@ $(function(){ .attr("d", line); } } - }); diff --git a/micropsi_server/static/js/netapi_console.js b/micropsi_server/static/js/netapi_console.js index bb161371..532c6613 100644 --- a/micropsi_server/static/js/netapi_console.js +++ b/micropsi_server/static/js/netapi_console.js @@ -353,7 +353,15 @@ $(function(){ function autocomplete_select(event){ if(event && $(event.target).attr('id') == 'console_input'){ - var el = $('a.selected', autocomplete_container) + var el = $('a.selected', autocomplete_container); + if(el.length == 0){ + var els = $('a', autocomplete_container); + if(els.length){ + el = $(els[0]); + } else { + return + } + } } else { if(event){ var el = $(event.target); diff --git a/micropsi_server/static/js/nodenet.js b/micropsi_server/static/js/nodenet.js index 53cd1ee1..c43aa435 100644 --- a/micropsi_server/static/js/nodenet.js +++ b/micropsi_server/static/js/nodenet.js @@ -16,6 +16,7 @@ var viewProperties = { selectionColor: new Color("#0099ff"), hoverColor: new Color("#089AC7"), linkColor: new Color("#000000"), + flowConnectionColor: new Color("#1F3755"), nodeColor: new Color("#c2c2d6"), nodeForegroundColor: new Color ("#000000"), nodeFontColor: new Color ("#000000"), @@ -23,6 +24,7 @@ var viewProperties = { symbolSize: 14, nodeWidth: 84, compactNodeWidth: 32, + flowModuleWidth: 160, cornerWidth: 6, padding: 5, slotWidth: 34, @@ -47,7 +49,8 @@ var viewProperties = { yMax: 13500, xMax: 13500, copyPasteOffset: 50, - snap_to_grid: false + snap_to_grid: false, + load_link_threshold: 1000 }; var nodenetscope = paper; @@ -57,26 +60,19 @@ var nodenet_loaded = false; // hashes from uids to object definitions; we import these via json nodes = {}; links = {}; +flow_connections = {}; selection = {}; monitors = {}; -GATE_DEFAULTS = { - "minimum": -1, - "maximum": 1, - "certainty": 1, - "amplification": 1, - "threshold": -1, - "theta": 0, - "rho": 0, - "spreadsheaves": 0 -} - 
- +available_gatefunctions = {} gatefunction_icons = { 'sigmoid': 'Σ', + 'elu': 'E', + 'relu': 'R', 'absolute': '|x|', 'one_over_x': '1/x', - 'identity': '' + 'identity': '', + 'threshold': 'T' } gridLayer = new Layer(); @@ -93,7 +89,7 @@ viewProperties.zoomFactor = parseFloat($.cookie('zoom_factor')) || viewPropertie var nodenetcookie = $.cookie('selected_nodenet') || ''; if (nodenetcookie && nodenetcookie.indexOf('/') > 0){ - nodenetcookie = nodenetcookie.split("/"); + nodenetcookie = nodenetcookie.replace('"', '').split("/"); currentNodenet = nodenetcookie[0]; currentNodeSpace = nodenetcookie[1] || null; } else { @@ -112,8 +108,6 @@ nodespace_property_defaults = { currentWorldadapter = null; -var currentSheaf = "default"; - var selectionRectangle = new Rectangle(1,1,1,1); selectionBox = new Path.Rectangle(selectionRectangle); selectionBox.strokeWidth = 0.5; @@ -123,11 +117,14 @@ selectionBox.name = "selectionBox"; nodetypes = {}; native_modules = {}; +flow_modules = {}; native_module_categories = {}; +flow_module_categories = {}; available_gatetypes = []; nodespaces = {}; sorted_nodetypes = []; sorted_native_modules = []; +sorted_flow_modules = []; nodenet_data = null; initializeMenus(); @@ -148,7 +145,7 @@ if(currentNodenet){ } else { splash = new PointText(new Point(50, 50)); splash.characterStyle = { fontSize: 20, fillColor: "#66666" }; - splash.content = 'Create a nodenet by selecting "New..." from the "Nodenet" menu.'; + splash.content = 'Create an agent by selecting "New..." 
from the "Agent" menu.'; nodeLayer.addChild(splash); toggleButtons(false); } @@ -183,12 +180,17 @@ function toggleButtons(on){ function get_available_worlds(){ api.call('get_available_worlds', {}, success=function(data){ var html = ''; + worlds = []; for(var uid in data){ - html += ''; + worlds.push([uid, data[uid].name]); + } + worlds.sort(function(a, b){return a[1] - b[1]}); + for(var i in worlds){ + html += ''; } - $('#nodenet_world').html(html); + $('#nodenet_world_uid').html(html); if(currentNodenet && nodenet_data){ - $('#nodenet_world').val(nodenet_data.world); + $('#nodenet_world_uid').val(nodenet_data.world); } }); } @@ -207,15 +209,15 @@ function get_available_worldadapters(world_uid, callback){ name = keys[idx]; str += ''; } - $('#nodenet_worldadapter').html(str); + $('#nodenet_worldadapter').html(str).removeAttr('disabled'); if(callback){ - callback(); + callback(data); } }); } else { - $('#nodenet_worldadapter').html(''); + $('#nodenet_worldadapter').html('').attr('disabled', 'disabled'); if(callback){ - callback(); + callback({}); } } } @@ -223,21 +225,23 @@ function get_available_worldadapters(world_uid, callback){ function get_available_gatefunctions(){ api.call('get_available_gatefunctions', {nodenet_uid: currentNodenet}, function(data){ html = ''; - for(var i=0; i < data.length; i++){ - html += ''; + available_gatefunctions = data; + for(var key in available_gatefunctions){ + html += ''; } $('#gate_gatefunction').html(html); }); } function setNodenetValues(data){ - $('#nodenet_world').val(data.world); + $('#nodenet_world_uid').val(data.world); $('#nodenet_uid').val(currentNodenet); - $('#nodenet_name').val(data.name); - $('#nodenet_snap').attr('checked', data.snap_to_grid); + $('#nodenet_nodenet_name').val(data.name); + $('#ui_snap').attr('checked', data.snap_to_grid); if (!jQuery.isEmptyObject(worldadapters)) { var worldadapter_select = $('#nodenet_worldadapter'); worldadapter_select.val(data.worldadapter); + 
worldadapter_select.trigger("change"); if(worldadapter_select.val() != data.worldadapter){ dialogs.notification("The worldadapter of this nodenet is not compatible to the world. Please choose a worldadapter from the list", 'Error'); } @@ -246,7 +250,7 @@ function setNodenetValues(data){ function buildCategoryTree(item, path, idx){ if (idx < path.length){ - name = path[idx]; + var name = path[idx]; if (!item[name]){ item[name] = {}; } @@ -275,7 +279,7 @@ function setCurrentNodenet(uid, nodespace, changed){ currentNodenet = uid; currentNodeSpace = data.rootnodespace; currentWorldadapter = data.worldadapter; - nodespaceProperties = data.nodespace_ui_properties; + nodespaceProperties = data.nodespace_ui_properties || {}; for(var key in data.nodespaces){ if(!(key in nodespaceProperties)){ nodespaceProperties[key] = {}; @@ -297,6 +301,8 @@ function setCurrentNodenet(uid, nodespace, changed){ linkLayer.removeChildren(); } $(document).trigger('nodenet_loaded', uid); + $('.nodenet_step').text(data.current_step || 0); + $('.world_step').text(data.current_world_step || 0); nodenet_data = data; nodenet_data['snap_to_grid'] = $.cookie('snap_to_grid') || viewProperties.snap_to_grid; @@ -305,21 +311,22 @@ function setCurrentNodenet(uid, nodespace, changed){ $.cookie('selected_nodenet', currentNodenet+"/", { expires: 7, path: '/' }); if(nodenetChanged || jQuery.isEmptyObject(nodetypes)){ - nodetypes = data.nodetypes; - native_modules = data.native_modules; - sorted_nodetypes = Object.keys(nodetypes); - sorted_nodetypes.sort(function(a, b){ + var sortfunc = function(a, b){ if(a < b) return -1; if(a > b) return 1; return 0; - }); + }; + nodetypes = data.nodetypes; + sorted_nodetypes = Object.keys(nodetypes); + sorted_nodetypes.sort(sortfunc); + native_modules = data.native_modules; sorted_native_modules = Object.keys(native_modules); - sorted_native_modules.sort(function(a, b){ - if(a < b) return -1; - if(a > b) return 1; - return 0; - }); + sorted_native_modules.sort(sortfunc); + + 
flow_modules = data.flow_modules; + sorted_flow_modules = Object.keys(flow_modules); + sorted_flow_modules.sort(sortfunc); categories = []; for(var key in native_modules){ @@ -330,6 +337,15 @@ function setCurrentNodenet(uid, nodespace, changed){ for(var i =0; i < categories.length; i++){ buildCategoryTree(native_module_categories, categories[i], 0); } + flow_categories = []; + for(var key in flow_modules){ + nodetypes[key] = flow_modules[key]; + flow_categories.push(flow_modules[key].category.split('/')); + } + flow_module_categories = {} + for(var i =0; i < flow_categories.length; i++){ + buildCategoryTree(flow_module_categories, flow_categories[i], 0); + } available_gatetypes = []; for(var key in nodetypes){ @@ -398,8 +414,10 @@ function setNodespaceData(data, changed){ removeLink(links[uid]); } var links_data = {} + var flow_connections = {}; for(uid in data.nodes){ - item = new Node(uid, data.nodes[uid]['position'][0], data.nodes[uid]['position'][1], data.nodes[uid].parent_nodespace, data.nodes[uid].name, data.nodes[uid].type, data.nodes[uid].sheaves, data.nodes[uid].state, data.nodes[uid].parameters, data.nodes[uid].gate_activations, data.nodes[uid].gate_parameters, data.nodes[uid].gate_functions); + var node = data.nodes[uid] + item = new Node(uid, node['position'][0], node['position'][1], node.parent_nodespace, node.name, node.type, node.activation, node.state, node.parameters, node.gate_activations, node.gate_configuration, node.is_highdimensional, node.inlinks, node.outlinks, node.inputmap); if(uid in nodes){ if(nodeRedrawNeeded(item)) { nodes[uid].update(item); @@ -410,29 +428,33 @@ function setNodespaceData(data, changed){ } else{ addNode(item); } - for(gate in data.nodes[uid].links){ - for(var i = 0; i < data.nodes[uid].links[gate].length; i++){ - luid = uid + ":" + gate + ":" + data.nodes[uid].links[gate][i]['target_slot_name'] + ":" + data.nodes[uid].links[gate][i]['target_node_uid'] - links_data[luid] = data.nodes[uid].links[gate][i] + for(gate in 
node.links){ + for(var i = 0; i < node.links[gate].length; i++){ + luid = uid + ":" + gate + ":" + node.links[gate][i]['target_slot_name'] + ":" + node.links[gate][i]['target_node_uid'] + links_data[luid] = node.links[gate][i] links_data[luid].source_node_uid = uid links_data[luid].source_gate_name = gate } } - } - for(uid in data.nodespaces){ - if(!(uid in nodespaces)){ - nodespaces[uid] = data.nodespaces[uid]; - } - item = new Node(uid, data.nodespaces[uid]['position'][0], data.nodespaces[uid]['position'][1], data.nodespaces[uid].parent_nodespace, data.nodespaces[uid].name, "Nodespace", 0, data.nodespaces[uid].state); - if(uid in nodes){ - redrawNode(item); - nodes[uid].update(item); - } else{ - addNode(item); + if(node.inputmap){ + for(var name in node.inputmap){ + var source_uid = node.inputmap[name][0]; + var source_name = node.inputmap[name][1]; + if(source_uid && source_name){ + cid = source_uid + ":" + source_name + ":" + name + ":" + uid; + links_data[cid] = { + 'source_node_uid': source_uid, + 'target_node_uid': uid, + 'source_name': source_name, + 'target_name': name, + 'is_flow_connection': true + }; + } + } } } - if(nodespaceProperties[currentNodeSpace].renderlinks == 'selection'){ + if(nodespaceProperties[currentNodeSpace].renderlinks != 'none'){ loadLinksForSelection(function(data){ for(var uid in links) { if(!(uid in data)) { @@ -440,13 +462,18 @@ function setNodespaceData(data, changed){ } } addLinks(data.links); - }); - } else { + }, false, true); + for(var uid in links) { if(!(uid in links_data)) { removeLink(links[uid]); } } + for(var uid in flow_connections) { + if(!(uid in links_data)) { + removeLink(flow_connections[uid]); + } + } addLinks(links_data); } @@ -487,7 +514,7 @@ function setNodespaceDiffData(data, changed){ links_data = {} for(var uid in data.changes.nodes_dirty){ var nodedata = data.changes.nodes_dirty[uid]; - item = new Node(uid, nodedata['position'][0], nodedata['position'][1], nodedata.parent_nodespace, nodedata.name, 
nodedata.type, nodedata.sheaves, nodedata.state, nodedata.parameters, nodedata.gate_activations, nodedata.gate_parameters, nodedata.gate_functions); + item = new Node(uid, nodedata['position'][0], nodedata['position'][1], nodedata.parent_nodespace, nodedata.name, nodedata.type, nodedata.activation, nodedata.state, nodedata.parameters, nodedata.gate_activations, nodedata.gate_configuration, nodedata.is_highdimensional, nodedata.inlinks, nodedata.outlinks); if(uid in nodes){ for (var gateName in nodes[uid].gates) { for (linkUid in nodes[uid].gates[gateName].outgoing) { @@ -509,39 +536,45 @@ function setNodespaceDiffData(data, changed){ links_data[luid].source_gate_name = gate } } - } - addLinks(links_data); - for(var uid in data.changes.nodespaces_dirty){ - var nodespacedata = data.changes.nodespaces_dirty[uid]; - if(!(uid in nodespaces)){ - nodespaces[uid] = nodespacedata; - } - item = new Node(uid, nodespacedata['position'][0], nodespacedata['position'][1], nodespacedata.parent_nodespace, nodespacedata.name, "Nodespace", 0, nodespacedata.state); - if(uid in nodes){ - redrawNode(item); - nodes[uid].update(item); - } else{ - addNode(item); + if(nodedata.inputmap){ + for(var name in nodedata.inputmap){ + var source_uid = nodedata.inputmap[name][0]; + var source_name = nodedata.inputmap[name][1]; + if (source_uid && source_name){ + cid = source_uid + ":" + source_name + ":" + name + ":" + uid; + links_data[cid] = { + 'source_node_uid': source_uid, + 'target_node_uid': uid, + 'source_name': source_name, + 'target_name': name, + 'is_flow_connection': true + }; + } + } } } + addLinks(links_data); } // activations: - for(var uid in data.activations){ - if (uid in nodes){ + for(var uid in nodes){ + activations = false + if(uid in data.activations){ activations = data.activations[uid]; - var gen = 0 - for(var i=0; i < nodes[uid].gateIndexes.length; i++){ - var type = nodes[uid].gateIndexes[i]; - nodes[uid].gates[type].sheaves['default'].activation = activations[i]; - if(type 
== 'gen'){ - gen = activations[i]; - } + } + var gen = 0 + for(var i=0; i < nodes[uid].gateIndexes.length; i++){ + var type = nodes[uid].gateIndexes[i]; + var gateAct = (activations) ? activations[i] : 0; + nodes[uid].gates[type].activation = gateAct; + if(type == 'gen'){ + gen = gateAct; } - nodes[uid].sheaves['default'].activation = gen; - setActivation(nodes[uid]); - redrawNodeLinks(nodes[uid]); } + nodes[uid].activation = gen; + setActivation(nodes[uid]); + redrawNodeLinks(nodes[uid]); } + updateModulators(data.modulators); if(data.monitors){ @@ -563,14 +596,20 @@ function addLinks(link_data){ sourceId = link_data[uid]['source_node_uid']; targetId = link_data[uid]['target_node_uid']; if (sourceId in nodes && targetId in nodes && nodes[sourceId].parent == nodes[targetId].parent){ - link = new Link(uid, sourceId, link_data[uid].source_gate_name, targetId, link_data[uid].target_slot_name, link_data[uid].weight, link_data[uid].certainty); + if(link_data[uid].is_flow_connection){ + link = new Link(uid, sourceId, link_data[uid].source_name, targetId, link_data[uid].target_name, 1, true); + } else { + link = new Link(uid, sourceId, link_data[uid].source_gate_name, targetId, link_data[uid].target_slot_name, link_data[uid].weight); + } if(uid in links){ redrawLink(link); + } else if(uid in flow_connections){ + redrawFlowConnection(link); } else { addLink(link); } } else if(sourceId in nodes || targetId in nodes){ - link = new Link(uid, sourceId, link_data[uid].source_gate_name, targetId, link_data[uid].target_slot_name, link_data[uid].weight, link_data[uid].certainty); + link = new Link(uid, sourceId, link_data[uid].source_gate_name, targetId, link_data[uid].target_slot_name, link_data[uid].weight); if(targetId in nodes && nodes[targetId].linksFromOutside.indexOf(link.uid) < 0) nodes[targetId].linksFromOutside.push(link.uid); if(sourceId in nodes && nodes[sourceId].linksToOutside.indexOf(link.uid) < 0) @@ -587,6 +626,7 @@ function addLinks(link_data){ } } + function 
get_nodenet_params(){ return { 'nodespaces': [currentNodeSpace], @@ -598,6 +638,7 @@ function get_nodenet_diff_params(){ return { 'nodespaces': [currentNodeSpace], 'step': window.currentSimulationStep, + 'include_links': nodespaceProperties[currentNodeSpace].renderlinks == 'always' } } @@ -607,6 +648,9 @@ if($('#nodenet_editor').height() > 0){ } $('#nodenet_editor').on('shown', function(){ register_stepping_function('nodenet_diff', get_nodenet_diff_params, setNodespaceDiffData); + if(!calculationRunning){ + $(document).trigger('runner_stepped'); + } }); $('#nodenet_editor').on('hidden', function(){ unregister_stepping_function('nodenet_diff'); @@ -690,11 +734,11 @@ function updateModulators(data){ // data structure for net entities -function Node(uid, x, y, nodeSpaceUid, name, type, sheaves, state, parameters, gate_activations, gate_parameters, gatefunctions) { +function Node(uid, x, y, nodeSpaceUid, name, type, activation, state, parameters, gate_activations, gate_configuration, is_highdim, inlinks, outlinks, inputmap) { this.uid = uid; this.x = x; this.y = y; - this.sheaves = sheaves || {"default": {"uid": "default", "name": "default", "activation": 0}}; + this.activation = activation || 0; this.state = state; this.name = name; this.type = type; @@ -706,44 +750,31 @@ function Node(uid, x, y, nodeSpaceUid, name, type, sheaves, state, parameters, g this.placeholder = {}; this.parent = nodeSpaceUid; // parent nodespace, default is root this.fillColor = null; - this.parameters = parameters || {}; + this.parameters = parameters || []; this.bounds = null; // current bounding box (after scaling) this.slotIndexes = []; this.gateIndexes = []; - this.gate_parameters = gate_parameters || {}; + this.gate_configuration = gate_configuration || {}; this.gate_activations = gate_activations || {}; - this.gatefunctions = gatefunctions || {}; - if(type == "Nodespace") { - this.symbol = "NS"; - } else { - this.symbol = nodetypes[type].symbol || type.substr(0,1); - var i; - for(i in 
nodetypes[type].slottypes){ - this.slots[nodetypes[type].slottypes[i]] = new Slot(nodetypes[type].slottypes[i]); - } - for(i in nodetypes[type].gatetypes){ - var gatetype = nodetypes[type].gatetypes[i] - parameters = {}; - sheaves = this.gate_activations[gatetype]; - if(!sheaves) { - sheaves = {"default":{"uid":"default", "name":"default", "activation": 0}}; - } - parameters = jQuery.extend({}, GATE_DEFAULTS); - if(nodetypes[type].gate_defaults && nodetypes[type].gate_defaults[gatetype]) { - for(var key in nodetypes[type].gate_defaults[gatetype]){ - parameters[key] = nodetypes[type].gate_defaults[gatetype][key]; - } - } - if(this.gate_parameters[gatetype]){ - for(var key in this.gate_parameters[gatetype]){ - parameters[key] = this.gate_parameters[gatetype][key]; - } - } - this.gates[gatetype] = new Gate(gatetype, i, sheaves, parameters, this.gatefunctions[gatetype]); - } - this.slotIndexes = Object.keys(this.slots); - this.gateIndexes = Object.keys(this.gates); + this.is_highdim = is_highdim; + this.inlinks = inlinks || 0; + this.outlinks = outlinks || 0; + this.symbol = nodetypes[type].symbol || type.substr(0,1); + this.is_flow_module = (this.type in flow_modules) + this.inputmap = inputmap; + var i; + for(i in nodetypes[type].slottypes){ + this.slots[nodetypes[type].slottypes[i]] = new Slot(nodetypes[type].slottypes[i]); } + for(i in nodetypes[type].gatetypes){ + var gatetype = nodetypes[type].gatetypes[i] + parameters = {}; + activation = this.gate_activations[gatetype]; + var highdim = is_highdim && gatetype in nodetypes[type].dimensionality.gates; + this.gates[gatetype] = new Gate(gatetype, i, activation, this.gate_configuration[gatetype], highdim); + } + this.slotIndexes = Object.keys(this.slots); + this.gateIndexes = Object.keys(this.gates); this.update = function(item){ this.uid = item.uid; @@ -752,17 +783,18 @@ function Node(uid, x, y, nodeSpaceUid, name, type, sheaves, state, parameters, g this.y = item.y; this.parent = item.parent; this.name = item.name; 
- this.sheaves = item.sheaves; + this.activation = item.activation; this.state = item.state; this.parameters = item.parameters; - this.gate_parameters = jQuery.extend(jQuery.extend({}, GATE_DEFAULTS), item.gate_parameters || {});; + this.gate_configuration = item.gate_configuration || {}; this.gate_activations = item.gate_activations; - this.gatefunctions = item.gatefunctions; + this.outlinks = item.outlinks; + this.inlinks = item.inlinks; + this.inputmap = item.inputmap; for(var i in nodetypes[type].gatetypes){ var gatetype = nodetypes[type].gatetypes[i]; - this.gates[gatetype].parameters = jQuery.extend(jQuery.extend({}, GATE_DEFAULTS), this.gate_parameters[gatetype]); - this.gates[gatetype].sheaves = this.gate_activations[gatetype]; - this.gates[gatetype].gatefunction = this.gatefunctions[gatetype]; + this.gates[gatetype].gate_configuration = this.gate_configuration[gatetype]; + this.gates[gatetype].activation = this.gate_activations[gatetype]; } }; @@ -771,8 +803,8 @@ function Node(uid, x, y, nodeSpaceUid, name, type, sheaves, state, parameters, g var gatechecksum = ""; for(var i in nodetypes[type].gatetypes){ var gatetype = nodetypes[type].gatetypes[i]; - gatechecksum += "-" + this.gates[gatetype].sheaves[currentSheaf].activation; - gatechecksum += ':' + this.gates[gatetype].gatefunction; + gatechecksum += "-" + this.gates[gatetype].activation; + gatechecksum += ':' + this.gates[gatetype].gate_cgatefunction; } return gatechecksum; }; @@ -782,33 +814,37 @@ function Node(uid, x, y, nodeSpaceUid, name, type, sheaves, state, parameters, g function Slot(name) { this.name = name; this.incoming = {}; - this.sheaves = {"default": {"uid": "default", "name": "default", "activation": 0}}; + this.activation = 0; } // source for links, part of a net entity -function Gate(name, index, sheaves, parameters, gatefunction) { +function Gate(name, index, activation, gate_configuration, is_highdim) { this.name = name; this.index = index; this.outgoing = {}; - this.sheaves = 
sheaves; - this.gatefunction = gatefunction || 'identity'; - if(parameters){ - this.parameters = parameters; - } else { - this.parameters = jQuery.extend({}, GATE_DEFAULTS); + this.activation = activation; + this.is_highdim = is_highdim; + this.gatefunction = 'identity'; + this.gatefunction_parameters = {} + if (gate_configuration) { + if(gate_configuration.gatefunction){ + this.gatefunction = gate_configuration.gatefunction; + } + if(gate_configuration.gatefunction_parameters){ + this.gatefunction_parameters = gate_configuration.gatefunction_parameters; + } } } // link, connects two nodes, from a gate to a slot -function Link(uid, sourceNodeUid, gateName, targetNodeUid, slotName, weight, certainty){ +function Link(uid, sourceNodeUid, gateName, targetNodeUid, slotName, weight, is_flow_connection){ this.uid = uid; this.sourceNodeUid = sourceNodeUid; this.gateName = gateName; this.targetNodeUid = targetNodeUid; this.slotName = slotName; this.weight = weight; - this.certainty = certainty; - + this.is_flow_connection = is_flow_connection; this.strokeColor = null; this.strokeWidth = null; } @@ -830,16 +866,21 @@ function addLink(link) { nodes[link.targetNodeUid].slots[link.slotName].incoming[link.uid]=link; slot = true; } - if((sourceNode.uid && !gate) || (targetNode.uid && !slot)){ - console.error('Incompatible slots and gates'); + if(((sourceNode.uid && !gate) || (targetNode.uid && !slot)) && !link.is_flow_connection){ + console.error('Incompatible slots and gates: gate:'+ link.gateName + ' / slot:'+link.slotName); return; } - // check if link is visible - if (!(isOutsideNodespace(nodes[link.sourceNodeUid]) && - isOutsideNodespace(nodes[link.targetNodeUid]))) { - renderLink(link); + if(link.is_flow_connection){ + renderFlowConnection(link); + flow_connections[link.uid] = link; + } else { + links[link.uid] = link; + // check if link is visible + if (!(isOutsideNodespace(nodes[link.sourceNodeUid]) && + isOutsideNodespace(nodes[link.targetNodeUid]))) { + 
renderLink(link); + } } - links[link.uid] = link; } else { console.error("Error: Attempting to create link without establishing nodes first"); } @@ -848,10 +889,9 @@ function addLink(link) { function redrawLink(link, forceRedraw){ var oldLink = links[link.uid]; if (forceRedraw || !oldLink || !(link.uid in linkLayer.children) || oldLink.weight != link.weight || - oldLink.certainty != link.certainty || !nodes[oldLink.sourceNodeUid] || !nodes[link.sourceNodeUid] || - nodes[oldLink.sourceNodeUid].gates[oldLink.gateName].sheaves[currentSheaf].activation != - nodes[link.sourceNodeUid].gates[link.gateName].sheaves[currentSheaf].activation) { + nodes[oldLink.sourceNodeUid].gates[oldLink.gateName].activation != + nodes[link.sourceNodeUid].gates[link.gateName].activation) { if(link.uid in linkLayer.children){ linkLayer.children[link.uid].remove(); } @@ -859,6 +899,17 @@ function redrawLink(link, forceRedraw){ links[link.uid] = link; } } +function redrawFlowConnection(link, forceRedraw){ + var oldLink = flow_connections[link.uid]; + if (forceRedraw || !oldLink || !(link.uid in linkLayer.children) || oldLink.weight != link.weight || + !nodes[oldLink.sourceNodeUid] || !nodes[link.sourceNodeUid]) { + if(link.uid in linkLayer.children){ + linkLayer.children[link.uid].remove(); + } + renderFlowConnection(link); + flow_connections[link.uid] = link; + } +} // delete a link from the array, and from the screen function removeLink(link) { @@ -965,6 +1016,14 @@ function redrawNodeNet() { renderLink(links[i]); } } + for(uid in flow_connections){ + var sourceNode = nodes[flow_connections[uid].sourceNodeUid]; + var targetNode = nodes[flow_connections[uid].targetNodeUid]; + // check if the link is visible + if (!(isOutsideNodespace(sourceNode) && isOutsideNodespace(targetNode))) { + renderFlowConnection(flow_connections[uid]); + } + } updateViewSize(); drawGridLines(view.element); } @@ -990,9 +1049,9 @@ function nodeRedrawNeeded(node){ if(node.uid in nodeLayer.children){ if(node.x == 
nodes[node.uid].x && node.y == nodes[node.uid].y && - node.sheaves[currentSheaf].activation == nodes[node.uid].sheaves[currentSheaf].activation && + node.name == nodes[node.uid].name && + node.activation == nodes[node.uid].activation && node.gatechecksum() == nodes[node.uid].gatechecksum() && - Object.keys(node.sheaves).length == Object.keys(nodes[node.uid].sheaves).length && viewProperties.zoomFactor == nodes[node.uid].zoomFactor){ return false; } @@ -1022,6 +1081,19 @@ function redrawNodeLinks(node) { renderLink(links[linkUid]); } } + redrawNodeFlowConnections(node); +} +function redrawNodeFlowConnections(node) { + if(node.is_flow_module){ + for(var uid in flow_connections){ + if(flow_connections[uid].sourceNodeUid == node.uid || flow_connections[uid].targetNodeUid == node.uid){ + if(uid in linkLayer.children) { + linkLayer.children[uid].remove(); + } + renderFlowConnection(flow_connections[uid]); + } + } + } } sourceBounds = {}; @@ -1042,7 +1114,7 @@ function calculateLinkStart(sourceNode, targetNode, gateName) { } var sourcePoints, startPoint, startAngle; if (!isOutsideNodespace(sourceNode) && isCompact(sourceNode)) { - if (sourceNode.type=="Sensor" || sourceNode.type == "Actor") { + if (sourceNode.type=="Sensor" || sourceNode.type == "Actuator") { if (sourceNode.type == "Sensor") startPoint = new Point(sourceBounds.x+sourceBounds.width*0.5, sourceBounds.y); @@ -1110,7 +1182,7 @@ function calculateLinkEnd(sourceNode, targetNode, slotName, linkType) { } } if (!isOutsideNodespace(targetNode) && isCompact(targetNode)) { - if (targetNode.type=="Sensor" || targetNode.type == "Actor") { + if (targetNode.type=="Sensor" || targetNode.type == "Actuator") { endPoint = new Point(targetBounds.x + targetBounds.width*0.6, targetBounds.y); endAngle = 270; } else { @@ -1244,7 +1316,7 @@ function renderLink(link, force) { link.strokeWidth = Math.max(0.1, Math.min(1.0, Math.abs(link.weight)))*viewProperties.zoomFactor; if(sourceNode){ - link.strokeColor = 
activationColor(sourceNode.gates[link.gateName].sheaves[currentSheaf].activation * link.weight, viewProperties.linkColor); + link.strokeColor = activationColor(sourceNode.gates[link.gateName].activation * link.weight, viewProperties.linkColor); } else { link.strokeColor = viewProperties.linkColor; } @@ -1261,7 +1333,7 @@ function renderLink(link, force) { linkContainer.name = link.uid; if (nodespaceProperties[currentNodeSpace].activation_display == 'alpha'){ if(sourceNode){ - linkContainer.opacity = Math.max(0.1, sourceNode.sheaves[currentSheaf].activation) + linkContainer.opacity = Math.max(0.1, sourceNode.activation) } else { linkContainer.opacity = 0.1 } @@ -1269,6 +1341,43 @@ function renderLink(link, force) { linkLayer.addChild(linkContainer); } +function renderFlowConnection(link, force) { + if(nodespaceProperties[currentNodeSpace].renderlinks == 'no'){ + return; + } + if(nodespaceProperties[currentNodeSpace].renderlinks == 'selection'){ + var is_selected = selection && (link.sourceNodeUid in selection || link.targetNodeUid in selection); + if(!is_selected){ + return; + } + } + var sourceNode = nodes[link.sourceNodeUid]; + var targetNode = nodes[link.targetNodeUid]; + if(!sourceNode || !targetNode){ + // TODO: deleting nodes need to clean flowconnections + return; + } + var sourceType = flow_modules[sourceNode.type]; + var targetType = flow_modules[targetNode.type]; + + var itemlength = sourceNode.bounds.width / sourceType.outputs.length; + var idx = sourceType.outputs.indexOf(link.gateName); + var linkStart = new Point(sourceNode.bounds.x + ((idx+.5) * itemlength), sourceNode.bounds.y + viewProperties.lineHeight * 0.7 * viewProperties.zoomFactor); + itemlength = targetNode.bounds.width / targetType.inputs.length; + idx = targetType.inputs.indexOf(link.slotName); + var linkEnd = new Point(targetNode.bounds.x + ((idx+.5) * itemlength), targetNode.bounds.y + targetNode.bounds.height - viewProperties.lineHeight * 0.3 * viewProperties.zoomFactor); + + var 
linkPath = new Path([linkStart, linkEnd]); + linkPath.strokeColor = viewProperties.flowConnectionColor; + linkPath.strokeWidth = 10 * viewProperties.zoomFactor; + linkPath.opacity = 0.8; + linkPath.name = "path"; + linkPath.dashArray = [viewProperties.zoomFactor,viewProperties.zoomFactor]; + var linkContainer = new Group(linkPath); + linkContainer.name = link.uid; + linkLayer.addChild(linkContainer); +} + // draw the line part of the link function createLink(startPoint, startAngle, startDirection, endPoint, endAngle, endDirection, linkColor, linkWidth, linkType) { var arrowEntry = new Point(viewProperties.arrowLength*viewProperties.zoomFactor,0).rotate(endAngle)+endPoint; @@ -1354,19 +1463,119 @@ function renderFullNode(node) { var nodeItem; if(node.type == 'Comment'){ nodeItem = renderComment(node); + } else if(node.type in flow_modules){ + var skeleton = createFullNodeSkeleton(node); + var activations = createFullNodeActivations(node); + var inputs = createFlowInputs(node); + var outputs = createFlowOutputs(node); + var gateAnnotations = createGateAnnotation(node); + nodeItem = new Group([activations, skeleton, inputs, outputs, gateAnnotations]); } else { var skeleton = createFullNodeSkeleton(node); var activations = createFullNodeActivations(node); var titleBar = createFullNodeLabel(node); - var sheavesAnnotation = createSheavesAnnotation(node); var gateAnnotations = createGateAnnotation(node); - nodeItem = new Group([activations, skeleton, titleBar, gateAnnotations, sheavesAnnotation]); + nodeItem = new Group([activations, skeleton, titleBar, gateAnnotations]); } nodeItem.name = node.uid; nodeItem.isCompact = false; nodeLayer.addChild(nodeItem); } +function createFlowInputs(node){ + var inputs = flow_modules[node.type].inputs; + var num = inputs.length; + var inputshapes = []; + var itemlength = node.bounds.width / num; + for(var i = 0; i < num; i++){ + var label = new PointText(node.bounds.x + ((i+.5) * itemlength), node.bounds.y + node.bounds.height - 
viewProperties.lineHeight * 0.3 * viewProperties.zoomFactor); + label.content = inputs[i]; + label.name = inputs[i]; + label.paragraphStyle.justification = 'center'; + label.characterStyle = { + fillColor: viewProperties.nodeFontColor, + fontSize: viewProperties.fontSize*viewProperties.zoomFactor + } + if(num > 1 && i < num - 1){ + var border = new Path.Rectangle( + node.bounds.x + ((i+1) * itemlength), + node.bounds.y + node.bounds.height - viewProperties.lineHeight * viewProperties.zoomFactor, + viewProperties.shadowDisplacement.x * viewProperties.zoomFactor, + viewProperties.lineHeight * viewProperties.zoomFactor + ); + border.fillColor = viewProperties.shadowColor; + border.fillColor.alpha = 0.3; + inputshapes.push(new Group([label, border])); + } else { + inputshapes.push(label); + } + } + var bounds = node.bounds; + var upper = new Path.Rectangle(bounds.x+viewProperties.shadowDisplacement.x*viewProperties.zoomFactor, + bounds.y + bounds.height - (viewProperties.lineHeight - viewProperties.strokeWidth)*viewProperties.zoomFactor, + bounds.width - viewProperties.shadowDisplacement.x*viewProperties.zoomFactor, + viewProperties.innerShadowDisplacement.y*viewProperties.zoomFactor); + upper.fillColor = viewProperties.shadowColor; + upper.fillColor.alpha = 0.3; + var lower = upper.clone(); + lower.position += new Point(0, viewProperties.innerShadowDisplacement.y*viewProperties.zoomFactor); + lower.fillColor = viewProperties.highlightColor; + lower.fillColor.alpha = 0.3; + var delimiter = new Group([upper, lower]); + delimiter.name = "delimiter"; + inputshapes.push(delimiter); + var group = new Group(inputshapes); + group.name = 'flowModuleInputs'; + return group; +} + +function createFlowOutputs(node, with_delimiter){ + var outputs = flow_modules[node.type].outputs; + var num = outputs.length; + var outputshapes = []; + var itemlength = node.bounds.width / num; + for(var i = 0; i < num; i++){ + var label = new PointText(node.bounds.x + ((i+.5) * itemlength), 
node.bounds.y + viewProperties.lineHeight * 0.7 * viewProperties.zoomFactor); + label.content = outputs[i]; + label.name = outputs[i]; + label.paragraphStyle.justification = 'center'; + label.characterStyle = { + fillColor: viewProperties.nodeFontColor, + fontSize: viewProperties.fontSize*viewProperties.zoomFactor + } + if(num > 1 && i < num - 1){ + var border = new Path.Rectangle( + node.bounds.x + ((i+1) * itemlength), + node.bounds.y, + viewProperties.shadowDisplacement.x * viewProperties.zoomFactor, + viewProperties.lineHeight * viewProperties.zoomFactor + ); + border.fillColor = viewProperties.shadowColor; + border.fillColor.alpha = 0.3; + outputshapes.push(new Group([label, border])); + } else { + outputshapes.push(label); + } + } + if(with_delimiter){ + var bounds = node.bounds; + var upper = new Path.Rectangle(bounds.x+viewProperties.shadowDisplacement.x*viewProperties.zoomFactor, + bounds.y + (viewProperties.lineHeight - viewProperties.strokeWidth)*viewProperties.zoomFactor, + bounds.width - viewProperties.shadowDisplacement.x*viewProperties.zoomFactor, + viewProperties.innerShadowDisplacement.y*viewProperties.zoomFactor); + upper.fillColor = viewProperties.shadowColor; + upper.fillColor.alpha = 0.3; + var lower = upper.clone(); + lower.position += new Point(0, viewProperties.innerShadowDisplacement.y*viewProperties.zoomFactor); + lower.fillColor = viewProperties.highlightColor; + lower.fillColor.alpha = 0.3; + delimiter = new Group([upper, lower]); + delimiter.name = "delimiter"; + outputshapes.push(delimiter); + } + return new Group(outputshapes); +} + function renderComment(node){ var bounds = node.bounds; var commentGroup = new Group(); @@ -1399,6 +1608,16 @@ function renderCompactNode(node) { var nodeItem; if(node.type == "Comment"){ nodeItem = renderComment(node); + } else if(node.type in flow_modules){ + var skeleton = createCompactNodeSkeleton(node); + var activations = createCompactNodeActivations(node); + var label = createCompactNodeLabel(node); 
+ var inputs = createFlowInputs(node); + var outputs = createFlowOutputs(node, true); + nodeItem = new Group([activations, skeleton, inputs, outputs]); + if (label){ + nodeItem.addChild(label); + } } else { var skeleton = createCompactNodeSkeleton(node); var activations = createCompactNodeActivations(node); @@ -1426,12 +1645,14 @@ function calculateNodeBounds(node) { if (!isCompact(node)) { width = viewProperties.nodeWidth * viewProperties.zoomFactor; height = viewProperties.lineHeight*(Math.max(node.slotIndexes.length, node.gateIndexes.length)+2)*viewProperties.zoomFactor; - if (node.type == "Nodespace"){ - height = Math.max(height, viewProperties.lineHeight*4*viewProperties.zoomFactor); - } } else { width = height = viewProperties.compactNodeWidth * viewProperties.zoomFactor; } + if(node.type in flow_modules){ + def = flow_modules[node.type]; + width = Math.max(def.inputs.length, def.outputs.length) * viewProperties.flowModuleWidth * viewProperties.zoomFactor; + height += viewProperties.lineHeight * viewProperties.zoomFactor; + } return new Rectangle(node.x*viewProperties.zoomFactor - width/2, node.y*viewProperties.zoomFactor - height/2, // center node on origin width, height); @@ -1439,7 +1660,7 @@ function calculateNodeBounds(node) { // determine shape of a full node function createFullNodeShape(node) { - if (node.type == "Nodespace" || node.type == "Comment"){ + if (node.type == "Comment"){ return new Path.Rectangle(node.bounds); } else { return new Path.RoundRectangle(node.bounds, viewProperties.cornerWidth*viewProperties.zoomFactor); @@ -1451,7 +1672,6 @@ function createCompactNodeShape(node) { var bounds = node.bounds; var shape; switch (node.type) { - case "Nodespace": case "Comment": shape = new Path.Rectangle(bounds); break; @@ -1462,7 +1682,7 @@ function createCompactNodeShape(node) { new Point(bounds.right, bounds.y-bounds.height * 0.3), bounds.bottomRight); shape.closePath(); break; - case "Actor": + case "Actuator": shape = new 
Path([bounds.bottomRight, new Point(bounds.x+bounds.width * 0.65, bounds.y), new Point(bounds.x+bounds.width * 0.35, bounds.y), @@ -1483,7 +1703,7 @@ function createCompactNodeShape(node) { case "Concept": // draw circle case "Pipe": // draw circle case "Script": // draw circle - case "Register": + case "Neuron": shape = new Path.Circle(new Point(bounds.x + bounds.width/2, bounds.y+bounds.height/2), bounds.width/2); break; default: @@ -1526,39 +1746,14 @@ function createFullNodeLabel(node) { return label; } -// draw the sheaves annotation of a full node -- this is rather hacky, we will want to find -// a better way of visualizing sheaves, including sheaf states -function createSheavesAnnotation(node) { - var bounds = node.bounds; - var label = new Group(); - label.name = "sheavesLabel"; - var titleText = new PointText(new Point(bounds.x+ 80*viewProperties.zoomFactor +viewProperties.padding*viewProperties.zoomFactor, - bounds.y+viewProperties.lineHeight*0.8*viewProperties.zoomFactor)); - titleText.characterStyle = { - fillColor: viewProperties.nodeFontColor, - fontSize: viewProperties.fontSize*viewProperties.zoomFactor - }; - var sheavesText = ""; - for(uid in node.sheaves) { - name = node.sheaves[uid].name; - if(name != "default") { - sheavesText += name + "\n"; - } - } - titleText.content = sheavesText; - titleText.name = "text"; - label.addChild(titleText); - return label; -} - function createGateAnnotation(node){ var labels = []; for (i = 0; i< node.gateIndexes.length; i++){ var g = node.gateIndexes[i]; var gatebounds = getGateBounds(node, i); - if (node.gatefunctions[g] && node.gatefunctions[g] != 'identity'){ + if (node.gates[g].gatefunction && node.gates[g].gatefunction != 'identity'){ var gatefuncHint = new PointText(new Point(gatebounds.right-(8*viewProperties.zoomFactor),gatebounds.center.y - 2*viewProperties.zoomFactor)); - gatefuncHint.content = gatefunction_icons[node.gatefunctions[g]]; + gatefuncHint.content = 
gatefunction_icons[node.gates[g].gatefunction]; gatefuncHint.fillColor = viewProperties.nodeForegroundColor; gatefuncHint.fontSize = (viewProperties.fontSize-2) * viewProperties.zoomFactor; labels.push(gatefuncHint); @@ -1726,10 +1921,10 @@ function createCompactNodeBodyLabel(node) { gatefuncHint.fillColor = viewProperties.nodeForegroundColor; gatefuncHint.fontSize = viewProperties.fontSize*viewProperties.zoomFactor; var non_standard_gatefunc = []; - for (var k in node.gatefunctions){ - if(node.gatefunctions[k] && node.gatefunctions[k] != 'identity'){ - if(non_standard_gatefunc.indexOf(node.gatefunctions[k]) < 0){ - non_standard_gatefunc.push(node.gatefunctions[k]); + for (var g in node.gates){ + if(node.gates[g].gatefunction && node.gates[g].gatefunction != 'identity'){ + if(non_standard_gatefunc.indexOf(node.gates[g].gatefunction) < 0){ + non_standard_gatefunc.push(node.gates[g].gatefunction); } } } @@ -1810,19 +2005,19 @@ function setActivation(node) { } if (node.uid in nodeLayer.children) { var nodeItem = nodeLayer.children[node.uid]; - if((nodespaceProperties[currentNodeSpace].activation_display != 'alpha') || node.sheaves[currentSheaf].activation > 0.5){ + if((nodespaceProperties[currentNodeSpace].activation_display != 'alpha') || node.activation > 0.5){ node.fillColor = nodeItem.children["activation"].children["body"].fillColor = - activationColor(node.sheaves[currentSheaf].activation, viewProperties.nodeColor); + activationColor(node.activation, viewProperties.nodeColor); } if(nodespaceProperties[currentNodeSpace].activation_display == 'alpha'){ for(var i in nodeItem.children){ if(nodeItem.children[i].name == 'labelText'){ nodeItem.children[i].opacity = 0; - if (node.sheaves[currentSheaf].activation > 0.5){ - nodeItem.children[i].opacity = node.sheaves[currentSheaf].activation; + if (node.activation > 0.5){ + nodeItem.children[i].opacity = node.activation; } } else { - nodeItem.children[i].opacity = Math.max(0.1, node.sheaves[currentSheaf].activation) + 
nodeItem.children[i].opacity = Math.max(0.1, node.activation) } } } @@ -1832,13 +2027,13 @@ function setActivation(node) { var type; for (type in node.slots) { nodeItem.children["activation"].children["slots"].children[i++].fillColor = - activationColor(node.slots[type].sheaves[currentSheaf].activation, + activationColor(node.slots[type].activation, viewProperties.nodeColor); } i=0; for (type in node.gates) { nodeItem.children["activation"].children["gates"].children[i++].fillColor = - activationColor(node.gates[type].sheaves[currentSheaf].activation, + activationColor(node.gates[type].activation, viewProperties.nodeColor); } } @@ -2128,10 +2323,6 @@ function onMouseDown(event) { } if (linkCreationStart) { - // todo: open dialog to link into different nodespaces - if(!(clickType == "node" && nodes[path.name].type == "Nodespace")){ - cancelLinkCreationHandler(); - } return; } @@ -2271,14 +2462,10 @@ function onDoubleClick(event) { if (nodeUid in nodes) { var node = nodes[nodeUid]; if(node.bounds.contains(p)){ - if(node.type == "Nodespace"){ - handleEnterNodespace(node.uid); - } else { - if(isCompact(nodeUid)){ - nodes[nodeUid].renderCompact = false; - redrawNode(nodes[nodeUid], true); - view.draw(); - } + if(isCompact(nodeUid)){ + nodes[nodeUid].renderCompact = false; + redrawNode(nodes[nodeUid], true); + view.draw(); } return; } @@ -2416,8 +2603,8 @@ function onMouseUp(event) { selectionRectangle.width = selectionRectangle.height = 1; selectionBox.setBounds(selectionRectangle); } - if(currentNodenet && nodenet_data && nodespaceProperties[currentNodeSpace].renderlinks == 'selection'){ - loadLinksForSelection(); + if(currentNodenet && nodenet_data){ + loadLinksForSelection(null, false, true); } } @@ -2492,23 +2679,42 @@ function updateSelection(event){ } } -function loadLinksForSelection(callback){ +function loadLinksForSelection(callback, force_load, show_node_form){ + var skiploading = false; + if(nodespaceProperties[currentNodeSpace].renderlinks == 'none'){ + 
skiploading = true; + } var uids = []; + var skipped = []; + var load_links = false; for(var uid in selection){ - if(uid in nodes && nodes[uid].type != 'Nodespace'){ + if(uid in nodes && (force_load || nodes[uid].inlinks < viewProperties.load_link_threshold && nodes[uid].outlinks < viewProperties.load_link_threshold)){ uids.push(uid) + } else { + skipped.push(uid) + } + if(skipped.indexOf(uid) < 0 && (nodes[uid].inlinks > 0 || nodes[uid].outlinks > 0)){ + load_links = true; } } - if(uids.length){ + if(nodespaceProperties[currentNodeSpace].renderlinks == 'always' && !load_links){ + skiploading = true; + } + if(!skiploading && uids.length){ api.call('get_links_for_nodes', {'nodenet_uid': currentNodenet, 'node_uids': uids }, function(data){ + for(var i=0; i < uids.length; i++){ + // all links loaded + nodes[uids[i]].outlinks = 0 + nodes[uids[i]].inlinks = 0 + } if(callback){ callback(data); } else { for(var uid in data.nodes){ - addNode(new Node(uid, data.nodes[uid]['position'][0], data.nodes[uid]['position'][1], data.nodes[uid].parent_nodespace, data.nodes[uid].name, data.nodes[uid].type, data.nodes[uid].sheaves, data.nodes[uid].state, data.nodes[uid].parameters, data.nodes[uid].gate_activations, data.nodes[uid].gate_parameters, data.nodes[uid].gate_functions)); + addNode(new Node(uid, data.nodes[uid]['position'][0], data.nodes[uid]['position'][1], data.nodes[uid].parent_nodespace, data.nodes[uid].name, data.nodes[uid].type, data.nodes[uid].activation, data.nodes[uid].state, data.nodes[uid].parameters, data.nodes[uid].gate_activations, data.nodes[uid].gate_configuration, data.nodes[uid].is_highdimensional, data.nodes[uid].inlinks, data.nodes[uid].outlinks)); } var linkdict = {}; for(var i = 0; i < data.links.length; i++){ @@ -2516,13 +2722,15 @@ function loadLinksForSelection(callback){ linkdict[luid] = data.links[i]; } addLinks(linkdict); - if(uids.length == 1 && uids[0] in selection && clickType != "gate"){ + if(uids.length == 1 && uids[0] in selection && clickType 
!= "gate" && show_node_form){ showNodeForm(uids[0]); } } view.draw(true); } ); + } else if(skipped.length == 1 && skipped[0] in nodes && skipped[0] in selection && clickType != "gate" && show_node_form){ + showNodeForm(skipped[0]); } } @@ -2541,17 +2749,8 @@ function initializeMenus() { $("#edit_link_modal form").on('submit', handleEditLink); $("#nodenet").on('dblclick', onDoubleClick); $("#nodespace_up").on('click', handleNodespaceUp); + $("#nodespace_add").on('click', createNodespace); gate_form_trigger = $('.gate_additional_trigger'); - gate_params = $('.gate_additional'); - gate_form_trigger.on('click', function(){ - if(gate_params.hasClass('hide')){ - gate_form_trigger.text("Hide additional parameters"); - gate_params.removeClass('hide'); - } else { - gate_form_trigger.text("Show additional parameters"); - gate_params.addClass('hide'); - } - }); } function initializeControls(){ @@ -2683,6 +2882,21 @@ function openContextMenu(menu_id, event) { }); html += ''; } + if(Object.keys(flow_modules).length){ + html += '
  • Create Flow Module'; + html += '
  • '; + } html += '
  • = conditions.mincount) && - (conditions.maxcount < 0 || count <= conditions.maxcount)){ - applicable_operations[key] = available_operations[key] + for(var i in conditions){ + if((conditions[i].nodetypes.length == 0 || $(nodetypes).not(conditions[i].nodetypes).get().length == 0) && + (count >= conditions[i].mincount) && + (conditions[i].maxcount < 0 || count <= conditions[i].maxcount)){ + applicable_operations[key] = available_operations[key]; + } } } @@ -2813,7 +3029,7 @@ function openNodeContextMenu(menu_id, event, nodeUid) { if(node.type == "Sensor"){ html += '
  • Select datasource
  • '; } - if(node.type == "Actor"){ + if(node.type == "Actuator"){ html += '
  • Select datatarget
  • '; } html += '
  • Add Monitor
  • ' + @@ -2821,6 +3037,8 @@ function openNodeContextMenu(menu_id, event, nodeUid) { '
  • Rename node
  • ' + '
  • Delete node
  • ' + '
  • Copy node
  • '; + + html += getOperationsDropdownHTML([node.type], 1); menu.html(html); openContextMenu(menu_id, event); } @@ -2865,10 +3083,10 @@ function handleContextMenu(event) { source_select.val(nodes[clickOriginUid].parameters['datasource']).select().focus(); }; break; - case "Actor": + case "Actuator": callback = function(data){ clickOriginUid = data; - dialogs.notification('Please Select a datatarget for this actor'); + dialogs.notification('Please Select a datatarget for this actuator'); var target_select = $('#select_datatarget_modal select'); target_select.html(''); $("#select_datatarget_modal").modal("show"); @@ -2995,7 +3213,6 @@ function handleContextMenu(event) { var linkUid = clickOriginUid; if (linkUid in links) { $("#link_weight_input").val(links[linkUid].weight); - $("#link_certainty_input").val(links[linkUid].certainty); $("#link_weight_input").focus(); } break; @@ -3020,7 +3237,8 @@ function selectOperation(name){ ''; } $('fieldset', modal).html(html); - var run = function(){ + var run = function(event){ + event.preventDefault(); data = $('form', modal).serializeArray(); parameters = {}; for(var i=0; i < data.length; i++){ @@ -3029,8 +3247,8 @@ function selectOperation(name){ modal.modal('hide'); runOperation(name, parameters); }; - $('form', modal).on('submit', run); - $('.btn-primary', modal).on('click', run); + $('form', modal).off().on('submit', run); + $('.btn-primary', modal).off().on('click', run); modal.modal('show'); } else { runOperation(name); @@ -3048,6 +3266,7 @@ function runOperation(name, params){ 'parameters': params || {}, 'selection_uids': selection_uids}, function(data){ refreshNodespace(); + $(document).trigger('runner_stepped') if(!$.isEmptyObject(data)){ html = ''; if(data.content_type && data.content_type.indexOf("image") > -1){ @@ -3142,6 +3361,27 @@ function get_datatarget_options(worldadapter, value){ return html; } +function createNodespace(event){ + event.preventDefault(); + api.call("add_nodespace", { + 'nodenet_uid': 
currentNodenet, + 'nodespace': null, + 'name': 'new nodespace' + }, success=function(data) { + var uid = data; + nodespaceProperties[uid] = nodespace_property_defaults + nodespaces[uid] = { + 'name': 'new nodespace', + 'parent': currentNodeSpace + }; + handleEnterNodespace(uid, function(){ + dialogs.notification('Nodespace created', 'success'); + $('#nodespace_name').select().focus(); + }); + + }); +} + // let user create a new node function createNodeHandler(x, y, name, type, parameters, callback) { params = {}; @@ -3159,25 +3399,18 @@ function createNodeHandler(x, y, name, type, parameters, callback) { var method = ""; var params = { nodenet_uid: currentNodenet, - position: [x,y,0], nodespace: currentNodeSpace, name: name} - if(type == "Nodespace"){ - method = "add_nodespace"; - } else { - method = "add_node" - params.type = type; - params.parameters = parameters; - } + method = "add_node" + params.type = type; + params.position = [x,y,0]; + params.parameters = parameters; api.call(method, params, success=function(uid){ - if(type == 'Nodespace'){ - nodespaceProperties[uid] = nodespace_property_defaults - } addNode(new Node(uid, x, y, currentNodeSpace, name || '', type, null, null, parameters)); - view.draw(); selectNode(uid); if(callback) callback(uid); + view.draw(); showNodeForm(uid); getNodespaceList(); } @@ -3251,36 +3484,33 @@ function handlePasteNodes(pastemode){ } // let user delete the current node, or all selected nodes -function deleteNodeHandler(nodeUid) { - function deleteNodespaceOnServer(nodespace_uid){ - var params = { - nodenet_uid: currentNodenet, - nodespace: nodespace_uid - } - api.call("delete_nodespace", params, - success=function(data){ - dialogs.notification('nodespace deleted', 'success'); - getNodespaceList(); - refreshNodespace(currentNodeSpace, -1); - } - ); +function deleteNodespace(event, nodespace_uid){ + if(!nodespace_uid){ + nodespace_uid = currentNodeSpace; } + var params = { + nodenet_uid: currentNodenet, + nodespace: 
nodespace_uid + } + var parent = nodespaces[nodespace_uid].parent; + api.call("delete_nodespace", params, + success=function(data){ + dialogs.notification('nodespace deleted', 'success'); + getNodespaceList(); + refreshNodespace(parent, -1); + } + ); +} + +function deleteNodeHandler(nodeUid) { var deletedNodes = []; - var deletedNodespaces = []; for (var selected in selection) { if(selection[selected].constructor == Node){ - if(nodes[selected].type == "Nodespace"){ - deletedNodespaces.push(selected); - } else{ - deletedNodes.push(selected); - } + deletedNodes.push(selected); removeNode(nodes[selected]); delete selection[selected]; } } - for(var i=0; i < deletedNodespaces.length; i++){ - deleteNodespaceOnServer(deletedNodespaces[i]); - } if(deletedNodes.length){ api.call('delete_nodes', {nodenet_uid: currentNodenet, node_uids: deletedNodes}, function(){ dialogs.notification('nodes deleted', 'success'); @@ -3324,9 +3554,7 @@ function handleEditLink(event){ var form = event.target.form; var linkUid = clickOriginUid; var weight = parseFloat($('input[name="link_weight"]', form).val()); - var certainty = parseFloat($('input[name="link_certainty"]', form).val()); links[linkUid].weight = weight; - links[linkUid].certainty = certainty; redrawLink(links[linkUid], true); view.draw(); api.call("set_link_weight", { @@ -3336,7 +3564,6 @@ function handleEditLink(event){ target_node_uid: links[linkUid].targetNodeUid, slot_type: links[linkUid].slotName, weight: weight, - certainty: certainty }); } @@ -3394,7 +3621,7 @@ function createLinkFromDialog(sourceUid, sourceGate, targetUid, targetSlot){ 'nodenet_uid': currentNodenet, 'node_uid': targetUid }, function(data){ - nodes[targetUid] = new Node(data.uid, data.position[0], data.position[1], data.parent_nodespace, data.name, data.type, data.sheaves, data.state, data.parameters, data.gate_activations, data.gate_parameters); + nodes[targetUid] = new Node(data.uid, data.position[0], data.position[1], data.parent_nodespace, data.name, 
data.type, data.activation, data.state, data.parameters, data.gate_activations, data.gate_configuration, data.is_highdimensional, data.inlinks, data.outlinks); createLinkFromDialog(sourceUid, sourceGate, targetUid, targetSlot); }); } else { @@ -3412,7 +3639,7 @@ function createLinkFromDialog(sourceUid, sourceGate, targetUid, targetSlot){ 'nodenet_uid': currentNodenet, 'node_uid': targetUid }, function(data){ - nodes[targetUid] = data; + nodes[targetUid] = new Node(data.uid, data.position[0], data.position[1], data.parent_nodespace, data.name, data.type, data.activation, data.state, data.parameters, data.gate_activations, data.gate_configuration, data.is_highdimensional, data.inlinks, data.outlinks); nodes[targetUid].linksFromOutside.push(uid); }); } else if(nodes[targetUid].parent != currentNodeSpace){ @@ -3544,14 +3771,14 @@ function finalizeLinkHandler(nodeUid, slotIndex) { cancelLinkCreationHandler(); } -function createLinkIfNotExists(sourceNode, sourceGate, targetNode, targetSlot, weight, certainty){ +function createLinkIfNotExists(sourceNode, sourceGate, targetNode, targetSlot, weight){ for(var uid in sourceNode.gates[sourceGate].outgoing){ var link = sourceNode.gates[sourceGate].outgoing[uid]; if(link.targetNodeUid == targetNode.uid && link.slotName == targetSlot){ return false; } } - var newlink = new Link('tmp', sourceNode.uid, sourceGate, targetNode.uid, targetSlot, weight || 1, certainty || 1); + var newlink = new Link('tmp', sourceNode.uid, sourceGate, targetNode.uid, targetSlot, weight || 1); return newlink; } @@ -3566,7 +3793,7 @@ function cancelLinkCreationHandler() { } function moveNodesOnServer(position_data){ - api.call("set_entity_positions", { + api.call("set_node_positions", { nodenet_uid: currentNodenet, positions: position_data }); @@ -3607,13 +3834,13 @@ function handleEditNode(event){ if(name && nodes[nodeUid].name != name){ renameNode(nodeUid, name); } - if(!jQuery.isEmptyObject(parameters) && nodes[nodeUid].type != 'Nodespace'){ + 
if(!jQuery.isEmptyObject(parameters)){ updateNodeParameters(nodeUid, parameters); } - if(nodes[nodeUid].state != state && nodes[nodeUid].type != 'Nodespace'){ + if(nodes[nodeUid].state != state){ setNodeState(nodeUid, state); } - if(nodes[nodeUid].sheaves[currentSheaf].activation != activation && nodes[nodeUid].type != 'Nodespace'){ + if(nodes[nodeUid].activation != activation){ setNodeActivation(nodeUid, activation); } redrawNode(nodes[nodeUid], true); @@ -3633,51 +3860,44 @@ function handleEditGate(event){ node = nodes[form.attr('data-node')]; gate = node.gates[form.attr('data-gate')]; } - var data = form.serializeArray(); - var params = {}; - var old_params = gate.parameters; - for(var i in data){ - if(!data[i].value && data[i].name in GATE_DEFAULTS){ - data[i].value = GATE_DEFAULTS[data[i].name]; - } - params[data[i].name] = parseFloat(data[i].value); - } - var gatefunc = $('#gate_gatefunction').val(); - if(gatefunc != gate.gatefunction){ - api.call('set_gatefunction', { - nodenet_uid: currentNodenet, - node_uid: node.uid, - gate_type: gate.name, - gatefunction: gatefunc - }, function(data){ - node.gatefunctions[gate.name] = gatefunc; - gate.gatefunction = gatefunc; - api.defaultSuccessCallback(); - redrawNode(node, true); - view.draw(); - }, api.defaultErrorCallback); + if(gate.is_highdim) { + return false; } - api.call('set_gate_parameters', { + var data = form.serializeArray(); + params = { nodenet_uid: currentNodenet, node_uid: node.uid, gate_type: gate.name, - parameters: params - }, api.defaultSuccessCallback, function(err){ - api.defaultErrorCallback(err); - gate.parameters = old_params; - if(form.css('display') == 'block'){ - showGateForm(node, gate); + gatefunction: 'identity', + gatefunction_parameters: {} + } + for(var i=0; i < data.length; i++){ + if(data[i].name == 'gate_gatefunction'){ + params.gatefunction = data[i].value; + } else { + params.gatefunction_parameters[data[i].name] = data[i].value } - }); - gate.parameters = params; + } + 
api.call('set_gate_configuration', params, function(data){ + config = { + 'gatefunction': params.gatefunction, + 'gatefunction_parameters': params.gatefunction_parameters + } + node.gate_configuration[gate.name] = config; + gate.gatefunction = params.gatefunction; + gate.gatefunction_parameters = params.gatefunction_parameters; + api.defaultSuccessCallback(); + redrawNode(node, true); + view.draw(); + }, api.defaultErrorCallback); } function setNodeActivation(nodeUid, activation){ activation = activation || 0; - nodes[nodeUid].sheaves[currentSheaf].activation = activation; + nodes[nodeUid].activation = activation; //TODO not sure this is generic enough, should probably just take the 0th if(nodes[nodeUid].gates["gen"]) { - nodes[nodeUid].gates["gen"].sheaves[currentSheaf].activation = activation; + nodes[nodeUid].gates["gen"].activation = activation; } api.call('set_node_activation', { 'nodenet_uid': currentNodenet, @@ -3727,12 +3947,14 @@ function handleSelectDatasourceModal(event){ var nodeUid = clickOriginUid; var value = $('#select_datasource_modal select').val(); $("#select_datasource_modal").modal("hide"); - nodes[clickOriginUid].parameters['datasource'] = value; + nodes[nodeUid].parameters['datasource'] = value; showNodeForm(nodeUid); api.call("bind_datasource_to_sensor", { nodenet_uid: currentNodenet, sensor_uid: nodeUid, datasource: value + }, function(data){ + showNodeForm(nodeUid, true); }); } @@ -3740,20 +3962,22 @@ function handleSelectDatatargetModal(event){ var nodeUid = clickOriginUid; var value = $('#select_datatarget_modal select').val(); $("#select_datatarget_modal").modal("hide"); - nodes[clickOriginUid].parameters['datatarget'] = value; + nodes[nodeUid].parameters['datatarget'] = value; showNodeForm(nodeUid); - api.call("bind_datatarget_to_actor", { + api.call("bind_datatarget_to_actuator", { nodenet_uid: currentNodenet, - actor_uid: nodeUid, + actuator_uid: nodeUid, datatarget: value + }, function(data){ + showNodeForm(nodeUid, true); }); } // 
handler for entering a nodespace -function handleEnterNodespace(nodespaceUid) { - if (nodespaceUid in nodes) { +function handleEnterNodespace(nodespaceUid, callback) { + if (nodespaceUid in nodespaces) { deselectAll(); - refreshNodespace(nodespaceUid, -1); + refreshNodespace(nodespaceUid, -1, callback); } } @@ -3767,35 +3991,37 @@ function handleNodespaceUp() { function handleEditNodenet(event){ event.preventDefault(); - var form = event.target; + var form = $(event.target); var reload = false; - var params = { - nodenet_uid: currentNodenet, - nodenet_name: $('#nodenet_name', form).val() - }; - var nodenet_world = $('#nodenet_world', form).val(); - if(nodenet_world){ - params.world_uid = nodenet_world; + var data = { + "nodenet_uid": currentNodenet, + "worldadapter_config": {} } - if(nodenet_world != nodenet_data.world){ - if(typeof currentWorld != 'undefined' && (nodenet_data.world == currentWorld || nodenet_world == currentWorld)){ - reload = true; + var formvalues = form.serializeArray(); + + for(var i = 0; i < formvalues.length; i++){ + var field = formvalues[i]; + if(field.name.substr(0, 11) == "nodenet_wa_"){ + data.worldadapter_config[field.name.substr(11)] = field.value; + } else if(field.name.substr(0, 8) == "nodenet_") { + data[field.name.substr(8)] = field.value; } } - var worldadapter = $('#nodenet_worldadapter', form).val(); - if(worldadapter){ - params.worldadapter = worldadapter; + if(data.world != nodenet_data.world){ + if(typeof currentWorld != 'undefined' && (nodenet_data.world == currentWorld || data.world == currentWorld)){ + reload = true; + } } - nodenet_data.snap_to_grid = $('#nodenet_snap').attr('checked'); + nodenet_data.snap_to_grid = $('#ui_snap').attr('checked'); $.cookie('snap_to_grid', nodenet_data.snap_to_grid || '', {path: '/', expires: 7}) - api.call("set_nodenet_properties", params, + api.call("set_nodenet_properties", data, success=function(data){ dialogs.notification('Nodenet data saved', 'success'); if(reload){ 
window.location.reload(); } else { - // setCurrentNodenet(currentNodenet, currentNodeSpace); - refreshNodespace(); + setCurrentNodenet(currentNodenet, currentNodeSpace, true); + // refreshNodespace(); } } ); @@ -3830,16 +4056,6 @@ function handleEditNodespace(event){ } } - -function setMonitorData(uid){ - api.call('export_monitor_data', params={ - 'nodenet_uid': currentNodenet, - 'monitor_uid': uid - }, function(data){ - monitors[uid] = data; - }) -} - function removeMonitor(node, target, type){ monitor = getMonitor(node, target, type); api.call('remove_monitor', { @@ -3880,7 +4096,9 @@ function scrollToNode(node, doShowNodeForm){ canvas_container.scrollLeft(x); selectNode(node.uid); view.draw(); - if(node.uid in nodes && doShowNodeForm) showNodeForm(node.uid); + if(node.uid in nodes && doShowNodeForm) { + loadLinksForSelection(null, false, true); + } }); } else { deselectAll(); @@ -3927,24 +4145,52 @@ function initializeSidebarForms(){ $('#edit_link_form .deleteLink').on('click', handleDeleteLink); $('#edit_node_form').submit(handleEditNode); $('#edit_gate_form').submit(handleEditGate); + $('#gate_gatefunction').on('change', updateGatefunctionParams); $('#edit_nodenet_form').submit(handleEditNodenet); $('#edit_nodespace_form').submit(handleEditNodespace); + $('#edit_nodespace_form #delete_nodespace').on('click', deleteNodespace); $('#native_add_param').click(function(){ $('#native_parameters').append(''); }); - var world_selector = $("#nodenet_world"); + var world_selector = $("#nodenet_world_uid"); + var worldadapter_selector = $("#nodenet_worldadapter"); + var update_worldadapter_params = function(data){ + var html = []; + var wa = worldadapters[worldadapter_selector.val()]; + if(!wa) return ; + for(var i in wa.config_options){ + var op = wa.config_options[i] + var param = '