+.venv
.directory
__pycache__
+supcon.egg-info
--- /dev/null
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loaded into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W"
+disable=print-statement,
+ parameter-unpacking,
+ unpacking-in-except,
+ old-raise-syntax,
+ backtick,
+ long-suffix,
+ old-ne-operator,
+ old-octal-literal,
+ import-star-module-level,
+ non-ascii-bytes-literal,
+ raw-checker-failed,
+ bad-inline-option,
+ locally-disabled,
+ locally-enabled,
+ file-ignored,
+ suppressed-message,
+ useless-suppression,
+ deprecated-pragma,
+ apply-builtin,
+ basestring-builtin,
+ buffer-builtin,
+ cmp-builtin,
+ coerce-builtin,
+ execfile-builtin,
+ file-builtin,
+ long-builtin,
+ raw_input-builtin,
+ reduce-builtin,
+ standarderror-builtin,
+ unicode-builtin,
+ xrange-builtin,
+ coerce-method,
+ delslice-method,
+ getslice-method,
+ setslice-method,
+ no-absolute-import,
+ old-division,
+ dict-iter-method,
+ dict-view-method,
+ next-method-called,
+ metaclass-assignment,
+ indexing-exception,
+ raising-string,
+ reload-builtin,
+ oct-method,
+ hex-method,
+ nonzero-method,
+ cmp-method,
+ input-builtin,
+ round-builtin,
+ intern-builtin,
+ unichr-builtin,
+ map-builtin-not-iterating,
+ zip-builtin-not-iterating,
+ range-builtin-not-iterating,
+ filter-builtin-not-iterating,
+ using-cmp-argument,
+ eq-without-hash,
+ div-method,
+ idiv-method,
+ rdiv-method,
+ exception-message-attribute,
+ invalid-str-codec,
+ sys-max-int,
+ bad-python3-import,
+ deprecated-string-function,
+ deprecated-str-translate-call,
+ deprecated-itertools-function,
+ deprecated-types-field,
+ next-method-defined,
+ dict-items-not-iterating,
+ dict-keys-not-iterating,
+ dict-values-not-iterating
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables error, warning, refactor, convention
+# and statement, which respectively contain the number of messages of each
+# category and the total number of statements analyzed. This is used by the
+# global evaluation report (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See the documentation for all details.
+#msg-template=
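+# For example, a commonly used template from the pylint documentation:
+#msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg}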
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never return. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=optparse.Values,sys.exit
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Defaults to names
+# with a leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take into consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary indicated by
+# the --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[BASIC]
+
+# Naming style matching correct argument names
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style
+argument-rgx=_?[a-z0-9]+([A-Z][a-z0-9]*)*
+
+# Naming style matching correct attribute names
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style
+attr-rgx=_{0,2}[a-z0-9]+([A-Z][a-z0-9]*)*
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Naming style matching correct class attribute names
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style
+class-attribute-rgx=_{0,2}[a-z0-9]+([A-Z][a-z0-9]*)*
+
+# Naming style matching correct class names
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-style
+#class-rgx=
+
+# Naming style matching correct constant names
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings; shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style
+function-rgx=_{0,2}[a-z0-9]+([A-Z][a-z0-9]*)*
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style
+#inlinevar-rgx=
+
+# Naming style matching correct method names
+method-naming-style=camelCase
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style
+method-rgx=_{0,2}[a-z0-9]+([A-Z][a-z0-9]*)*
+
+# Naming style matching correct module names
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=abc.abstractproperty
+
+# Naming style matching correct variable names
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style
+variable-rgx=_{0,2}[a-z0-9]+([A-Z][a-z0-9]*)*
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=2
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,
+ dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis).
+# It supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken into consideration
+# when showing a hint for a missing member.
+missing-member-max-choices=1
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[IMPORTS]
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=optparse,tkinter.tix
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
--- /dev/null
+{
+ "python.pythonPath": "${workspaceFolder}/.venv/bin/python",
+ "python.linting.pylintPath": "${workspaceFolder}/.venv/bin/pylint",
+ "files.watcherExclude": {
+ "**/.git/objects/**": true,
+ "**/.git/subtree-cache/**": true,
+ "**/node_modules/**": true,
+ "**/.venv/**": true
+  }
+}
\ No newline at end of file
[dev-packages]
"e1839a8" = {path = ".", extras = ["e"], editable = true}
+pylint = "*"
+rope = "*"
[requires]
python_version = "3"
{
"_meta": {
"hash": {
- "sha256": "5222cbe0571531c1844f56e5766605155e243efab356f8b635f0ad9956893afa"
+ "sha256": "efc26f7408a3ae4838ef28011201782488852f2589dd25df9781cb72aaac03ef"
},
"pipfile-spec": 6,
"requires": {
]
},
"default": {
- "asn1crypto": {
- "hashes": [
- "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
- "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
- ],
- "version": "==0.24.0"
- },
- "attrs": {
- "hashes": [
- "sha256:1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9",
- "sha256:a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450"
- ],
- "version": "==17.4.0"
- },
- "automat": {
- "hashes": [
- "sha256:2140297df155f7990f6f4c73b2ab0583bd8150db9ed2a1b48122abe66e9908c1",
- "sha256:3c1fd04ecf08ac87b4dd3feae409542e9bf7827257097b2b6ed5692f69d6f6a8"
- ],
- "version": "==0.6.0"
- },
- "cffi": {
- "hashes": [
- "sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743",
- "sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef",
- "sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50",
- "sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f",
- "sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93",
- "sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257",
- "sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3",
- "sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc",
- "sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04",
- "sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6",
- "sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359",
- "sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596",
- "sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b",
- "sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd",
- "sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95",
- "sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e",
- "sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6",
- "sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca",
- "sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31",
- "sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1",
- "sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085",
- "sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801",
- "sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4",
- "sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184",
- "sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917",
- "sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f",
- "sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb"
- ],
- "markers": "platform_python_implementation != 'pypy'",
- "version": "==1.11.5"
- },
- "constantly": {
- "hashes": [
- "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35",
- "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"
- ],
- "version": "==15.1.0"
- },
- "cryptography": {
- "hashes": [
- "sha256:3f3b65d5a16e6b52fba63dc860b62ca9832f51f1a2ae5083c78b6840275f12dd",
- "sha256:551a3abfe0c8c6833df4192a63371aa2ff43afd8f570ed345d31f251d78e7e04",
- "sha256:5cb990056b7cadcca26813311187ad751ea644712022a3976443691168781b6f",
- "sha256:60bda7f12ecb828358be53095fc9c6edda7de8f1ef571f96c00b2363643fa3cd",
- "sha256:6fef51ec447fe9f8351894024e94736862900d3a9aa2961528e602eb65c92bdb",
- "sha256:77d0ad229d47a6e0272d00f6bf8ac06ce14715a9fd02c9a97f5a2869aab3ccb2",
- "sha256:808fe471b1a6b777f026f7dc7bd9a4959da4bfab64972f2bbe91e22527c1c037",
- "sha256:9b62fb4d18529c84b961efd9187fecbb48e89aa1a0f9f4161c61b7fc42a101bd",
- "sha256:9e5bed45ec6b4f828866ac6a6bedf08388ffcfa68abe9e94b34bb40977aba531",
- "sha256:9fc295bf69130a342e7a19a39d7bbeb15c0bcaabc7382ec33ef3b2b7d18d2f63",
- "sha256:abd070b5849ed64e6d349199bef955ee0ad99aefbad792f0c587f8effa681a5e",
- "sha256:ba6a774749b6e510cffc2fb98535f717e0e5fd91c7c99a61d223293df79ab351",
- "sha256:c332118647f084c983c6a3e1dba0f3bcb051f69d12baccac68db8d62d177eb8a",
- "sha256:d6f46e862ee36df81e6342c2177ba84e70f722d9dc9c6c394f9f1f434c4a5563",
- "sha256:db6013746f73bf8edd9c3d1d3f94db635b9422f503db3fc5ef105233d4c011ab",
- "sha256:f57008eaff597c69cf692c3518f6d4800f0309253bb138b526a37fe9ef0c7471",
- "sha256:f6c821ac253c19f2ad4c8691633ae1d1a17f120d5b01ea1d256d7b602bc59887"
- ],
- "version": "==2.2.2"
- },
"e1839a8": {
"editable": true,
"path": "."
},
- "hyperlink": {
- "hashes": [
- "sha256:98da4218a56b448c7ec7d2655cb339af1f7d751cf541469bb4fc28c4a4245b34",
- "sha256:f01b4ff744f14bc5d0a22a6b9f1525ab7d6312cb0ff967f59414bbac52f0a306"
- ],
- "version": "==18.0.0"
- },
- "idna": {
- "hashes": [
- "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
- "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
- ],
- "version": "==2.6"
- },
- "incremental": {
- "hashes": [
- "sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f",
- "sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3"
- ],
- "version": "==17.5.0"
- },
"msgpack-python": {
"hashes": [
"sha256:378cc8a6d3545b532dfd149da715abae4fda2a3adb6d74e525d0d5e51f46909b"
"sha256:be1480aecf7c8aba9c92d2846ae153ad4c7e89ae188b28e4bcc579c3bc30e36b"
],
"version": "==1.40.post1"
- },
- "pyasn1": {
- "hashes": [
- "sha256:0d7f6e959fe53f3960a23d73f35e1fce61348b30915b6664309ca756de7c1f89",
- "sha256:5a0db897b311d265cde49615cf783f1c78613138605cdd0f907ecfa5b2aba3ee",
- "sha256:758cb50abddc03e4563fd9e7f03db56e3e87b58c0bd01247360326e5c0c7ffa5",
- "sha256:7d626683e3d792cccc608da02498aff37ab4f3dafd8905d6bf755d11f9b26b43",
- "sha256:a7efe807c4b83a859e2735c692b92ed7b567cfddc4163763412920041d876c2b",
- "sha256:b5a9ca48055b9a20f6d1b3d68e38692e5431c86a0f99ea602e61294e891fee5b",
- "sha256:c07d6e587b2f928366b1f67c09bda026a3e6fcc99e80a744dc67f8fca3895626",
- "sha256:d258b0a71994f7770599835249cece1caef3c70def868c4915e6e5ca49b67d15",
- "sha256:d5cd6ed995dba16fad0c521cfe31cd2d68400b53fcc2bce93326829be73ab6d1",
- "sha256:d84c2aea3cf43780e9e6a19f4e4dddee9f6976519020e64e47c57e5c7a8c3dd2",
- "sha256:e85895087905c65b5b594eb91f7522664c85545b147d5f4d4e7b1b07da8dcbdc",
- "sha256:f81c96761fca60d64b1c9b79ec2e40cf9495a745cf570613079ef324aeb9672b"
- ],
- "version": "==0.4.2"
- },
- "pyasn1-modules": {
- "hashes": [
- "sha256:041e9fbafac548d095f5b6c3b328b80792f006196e15a232b731a83c93d59493",
- "sha256:0cdca76a68dcb701fff58c397de0ef9922b472b1cb3ea9695ca19d03f1869787",
- "sha256:0cea139045c38f84abaa803bcb4b5e8775ea12a42af10019d942f227acc426c3",
- "sha256:0f2e50d20bc670be170966638fa0ae603f0bc9ed6ebe8e97a6d1d4cef30cc889",
- "sha256:47fb6757ab78fe966e7c58b2030b546854f78416d653163f0ce9290cf2278e8b",
- "sha256:598a6004ec26a8ab40a39ea955068cf2a3949ad9c0030da970f2e1ca4c9f1cc9",
- "sha256:72fd8b0c11191da088147c6e4678ec53e573923ecf60b57eeac9e97433e09fc2",
- "sha256:854700bbdd01394e2ada9c1bfbd0ed9f5d0c551350dbbd023e88b11d2771ae06",
- "sha256:af00ea8f2022b6287dc375b2c70f31ab5af83989fc6fe9eacd4976ce26cd7ccc",
- "sha256:b1f395cae2d669e0830cb023aa86f9f283b7a9aa32317d7f80d8e78aa2745812",
- "sha256:c6747146e95d2b14cc2a8399b2b0bde3f93778f8f9ec704690d2b589c376c137",
- "sha256:f53fe5bcebdf318f51399b250fe8325ef3a26d927f012cc0c8e0f9e9af7f9deb"
- ],
- "version": "==0.2.1"
- },
- "pycparser": {
- "hashes": [
- "sha256:99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226"
- ],
- "version": "==2.18"
- },
- "pyopenssl": {
- "hashes": [
- "sha256:07a2de1a54de07448732a81e38a55df7da109b2f47f599f8bb35b0cbec69d4bd",
- "sha256:2c10cfba46a52c0b0950118981d61e72c1e5b1aac451ca1bc77de1a679456773"
- ],
- "version": "==17.5.0"
- },
- "service-identity": {
- "hashes": [
- "sha256:0e76f3c042cc0f5c7e6da002cf646f59dc4023962d1d1166343ce53bdad39e17",
- "sha256:4001fbb3da19e0df22c47a06d29681a398473af4aa9d745eca525b3b2c2302ab"
- ],
- "version": "==17.0.0"
- },
- "six": {
- "hashes": [
- "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
- "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
- ],
- "version": "==1.11.0"
- },
- "twisted": {
- "hashes": [
- "sha256:0da1a7e35d5fcae37bc9c7978970b5feb3bc82822155b8654ec63925c05af75c",
- "sha256:716805e624f9396fcc1f47e8aef68e629fd31599a74855b6e1636122c042458d",
- "sha256:7bc3cdfd1ca5e5b84c7936db3c2cb2feb7d5b77410e713fd346da095a3b6a1d2"
- ],
- "version": "==17.9.0"
- },
- "zope.interface": {
- "hashes": [
- "sha256:21506674d30c009271fe68a242d330c83b1b9d76d62d03d87e1e9528c61beea6",
- "sha256:3d184aff0756c44fff7de69eb4cd5b5311b6f452d4de28cb08343b3f21993763",
- "sha256:467d364b24cb398f76ad5e90398d71b9325eb4232be9e8a50d6a3b3c7a1c8789",
- "sha256:57c38470d9f57e37afb460c399eb254e7193ac7fb8042bd09bdc001981a9c74c",
- "sha256:9ada83f4384bbb12dedc152bcdd46a3ac9f5f7720d43ac3ce3e8e8b91d733c10",
- "sha256:a1daf9c5120f3cc6f2b5fef8e1d2a3fb7bbbb20ed4bfdc25bc8364bc62dcf54b",
- "sha256:e6b77ae84f2b8502d99a7855fa33334a1eb6159de45626905cb3e454c023f339",
- "sha256:e881ef610ff48aece2f4ee2af03d2db1a146dc7c705561bd6089b2356f61641f",
- "sha256:f41037260deaacb875db250021fe883bf536bf6414a4fd25b25059b02e31b120"
- ],
- "version": "==4.5.0"
}
},
"develop": {
- "asn1crypto": {
+ "astroid": {
"hashes": [
- "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
- "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
+ "sha256:0ef2bf9f07c3150929b25e8e61b5198c27b0dca195e156f0e4d5bdd89185ca1a",
+ "sha256:fc9b582dba0366e63540982c3944a9230cbc6f303641c51483fa547dcc22393a"
],
- "version": "==0.24.0"
- },
- "attrs": {
- "hashes": [
- "sha256:1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9",
- "sha256:a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450"
- ],
- "version": "==17.4.0"
- },
- "automat": {
- "hashes": [
- "sha256:2140297df155f7990f6f4c73b2ab0583bd8150db9ed2a1b48122abe66e9908c1",
- "sha256:3c1fd04ecf08ac87b4dd3feae409542e9bf7827257097b2b6ed5692f69d6f6a8"
- ],
- "version": "==0.6.0"
- },
- "cffi": {
- "hashes": [
- "sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743",
- "sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef",
- "sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50",
- "sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f",
- "sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93",
- "sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257",
- "sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3",
- "sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc",
- "sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04",
- "sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6",
- "sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359",
- "sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596",
- "sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b",
- "sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd",
- "sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95",
- "sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e",
- "sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6",
- "sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca",
- "sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31",
- "sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1",
- "sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085",
- "sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801",
- "sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4",
- "sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184",
- "sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917",
- "sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f",
- "sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb"
- ],
- "markers": "platform_python_implementation != 'pypy'",
- "version": "==1.11.5"
- },
- "constantly": {
- "hashes": [
- "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35",
- "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"
- ],
- "version": "==15.1.0"
- },
- "cryptography": {
- "hashes": [
- "sha256:3f3b65d5a16e6b52fba63dc860b62ca9832f51f1a2ae5083c78b6840275f12dd",
- "sha256:551a3abfe0c8c6833df4192a63371aa2ff43afd8f570ed345d31f251d78e7e04",
- "sha256:5cb990056b7cadcca26813311187ad751ea644712022a3976443691168781b6f",
- "sha256:60bda7f12ecb828358be53095fc9c6edda7de8f1ef571f96c00b2363643fa3cd",
- "sha256:6fef51ec447fe9f8351894024e94736862900d3a9aa2961528e602eb65c92bdb",
- "sha256:77d0ad229d47a6e0272d00f6bf8ac06ce14715a9fd02c9a97f5a2869aab3ccb2",
- "sha256:808fe471b1a6b777f026f7dc7bd9a4959da4bfab64972f2bbe91e22527c1c037",
- "sha256:9b62fb4d18529c84b961efd9187fecbb48e89aa1a0f9f4161c61b7fc42a101bd",
- "sha256:9e5bed45ec6b4f828866ac6a6bedf08388ffcfa68abe9e94b34bb40977aba531",
- "sha256:9fc295bf69130a342e7a19a39d7bbeb15c0bcaabc7382ec33ef3b2b7d18d2f63",
- "sha256:abd070b5849ed64e6d349199bef955ee0ad99aefbad792f0c587f8effa681a5e",
- "sha256:ba6a774749b6e510cffc2fb98535f717e0e5fd91c7c99a61d223293df79ab351",
- "sha256:c332118647f084c983c6a3e1dba0f3bcb051f69d12baccac68db8d62d177eb8a",
- "sha256:d6f46e862ee36df81e6342c2177ba84e70f722d9dc9c6c394f9f1f434c4a5563",
- "sha256:db6013746f73bf8edd9c3d1d3f94db635b9422f503db3fc5ef105233d4c011ab",
- "sha256:f57008eaff597c69cf692c3518f6d4800f0309253bb138b526a37fe9ef0c7471",
- "sha256:f6c821ac253c19f2ad4c8691633ae1d1a17f120d5b01ea1d256d7b602bc59887"
- ],
- "version": "==2.2.2"
+ "version": "==1.6.5"
},
"e1839a8": {
"editable": true,
"path": "."
},
- "hyperlink": {
- "hashes": [
- "sha256:98da4218a56b448c7ec7d2655cb339af1f7d751cf541469bb4fc28c4a4245b34",
- "sha256:f01b4ff744f14bc5d0a22a6b9f1525ab7d6312cb0ff967f59414bbac52f0a306"
- ],
- "version": "==18.0.0"
- },
- "idna": {
- "hashes": [
- "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
- "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
- ],
- "version": "==2.6"
- },
- "incremental": {
- "hashes": [
- "sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f",
- "sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3"
- ],
- "version": "==17.5.0"
+ "isort": {
+ "hashes": [
+ "sha256:1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af",
+ "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",
+ "sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497"
+ ],
+ "version": "==4.3.4"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
},
"msgpack-python": {
"hashes": [
],
"version": "==1.40.post1"
},
- "pyasn1": {
- "hashes": [
- "sha256:0d7f6e959fe53f3960a23d73f35e1fce61348b30915b6664309ca756de7c1f89",
- "sha256:5a0db897b311d265cde49615cf783f1c78613138605cdd0f907ecfa5b2aba3ee",
- "sha256:758cb50abddc03e4563fd9e7f03db56e3e87b58c0bd01247360326e5c0c7ffa5",
- "sha256:7d626683e3d792cccc608da02498aff37ab4f3dafd8905d6bf755d11f9b26b43",
- "sha256:a7efe807c4b83a859e2735c692b92ed7b567cfddc4163763412920041d876c2b",
- "sha256:b5a9ca48055b9a20f6d1b3d68e38692e5431c86a0f99ea602e61294e891fee5b",
- "sha256:c07d6e587b2f928366b1f67c09bda026a3e6fcc99e80a744dc67f8fca3895626",
- "sha256:d258b0a71994f7770599835249cece1caef3c70def868c4915e6e5ca49b67d15",
- "sha256:d5cd6ed995dba16fad0c521cfe31cd2d68400b53fcc2bce93326829be73ab6d1",
- "sha256:d84c2aea3cf43780e9e6a19f4e4dddee9f6976519020e64e47c57e5c7a8c3dd2",
- "sha256:e85895087905c65b5b594eb91f7522664c85545b147d5f4d4e7b1b07da8dcbdc",
- "sha256:f81c96761fca60d64b1c9b79ec2e40cf9495a745cf570613079ef324aeb9672b"
- ],
- "version": "==0.4.2"
- },
- "pyasn1-modules": {
- "hashes": [
- "sha256:041e9fbafac548d095f5b6c3b328b80792f006196e15a232b731a83c93d59493",
- "sha256:0cdca76a68dcb701fff58c397de0ef9922b472b1cb3ea9695ca19d03f1869787",
- "sha256:0cea139045c38f84abaa803bcb4b5e8775ea12a42af10019d942f227acc426c3",
- "sha256:0f2e50d20bc670be170966638fa0ae603f0bc9ed6ebe8e97a6d1d4cef30cc889",
- "sha256:47fb6757ab78fe966e7c58b2030b546854f78416d653163f0ce9290cf2278e8b",
- "sha256:598a6004ec26a8ab40a39ea955068cf2a3949ad9c0030da970f2e1ca4c9f1cc9",
- "sha256:72fd8b0c11191da088147c6e4678ec53e573923ecf60b57eeac9e97433e09fc2",
- "sha256:854700bbdd01394e2ada9c1bfbd0ed9f5d0c551350dbbd023e88b11d2771ae06",
- "sha256:af00ea8f2022b6287dc375b2c70f31ab5af83989fc6fe9eacd4976ce26cd7ccc",
- "sha256:b1f395cae2d669e0830cb023aa86f9f283b7a9aa32317d7f80d8e78aa2745812",
- "sha256:c6747146e95d2b14cc2a8399b2b0bde3f93778f8f9ec704690d2b589c376c137",
- "sha256:f53fe5bcebdf318f51399b250fe8325ef3a26d927f012cc0c8e0f9e9af7f9deb"
- ],
- "version": "==0.2.1"
- },
- "pycparser": {
+ "pylint": {
"hashes": [
- "sha256:99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226"
+ "sha256:a48070545c12430cfc4e865bf62f5ad367784765681b3db442d8230f0960aa3c",
+ "sha256:fff220bcb996b4f7e2b0f6812fd81507b72ca4d8c4d05daf2655c333800cb9b3"
],
- "version": "==2.18"
+ "index": "pypi",
+ "version": "==1.9.2"
},
- "pyopenssl": {
+ "rope": {
"hashes": [
- "sha256:07a2de1a54de07448732a81e38a55df7da109b2f47f599f8bb35b0cbec69d4bd",
- "sha256:2c10cfba46a52c0b0950118981d61e72c1e5b1aac451ca1bc77de1a679456773"
+ "sha256:a09edfd2034fd50099a67822f9bd851fbd0f4e98d3b87519f6267b60e50d80d1"
],
- "version": "==17.5.0"
- },
- "service-identity": {
- "hashes": [
- "sha256:0e76f3c042cc0f5c7e6da002cf646f59dc4023962d1d1166343ce53bdad39e17",
- "sha256:4001fbb3da19e0df22c47a06d29681a398473af4aa9d745eca525b3b2c2302ab"
- ],
- "version": "==17.0.0"
+ "index": "pypi",
+ "version": "==0.10.7"
},
"six": {
"hashes": [
],
"version": "==1.11.0"
},
- "twisted": {
- "hashes": [
- "sha256:0da1a7e35d5fcae37bc9c7978970b5feb3bc82822155b8654ec63925c05af75c",
- "sha256:716805e624f9396fcc1f47e8aef68e629fd31599a74855b6e1636122c042458d",
- "sha256:7bc3cdfd1ca5e5b84c7936db3c2cb2feb7d5b77410e713fd346da095a3b6a1d2"
- ],
- "version": "==17.9.0"
- },
- "zope.interface": {
+ "wrapt": {
"hashes": [
- "sha256:21506674d30c009271fe68a242d330c83b1b9d76d62d03d87e1e9528c61beea6",
- "sha256:3d184aff0756c44fff7de69eb4cd5b5311b6f452d4de28cb08343b3f21993763",
- "sha256:467d364b24cb398f76ad5e90398d71b9325eb4232be9e8a50d6a3b3c7a1c8789",
- "sha256:57c38470d9f57e37afb460c399eb254e7193ac7fb8042bd09bdc001981a9c74c",
- "sha256:9ada83f4384bbb12dedc152bcdd46a3ac9f5f7720d43ac3ce3e8e8b91d733c10",
- "sha256:a1daf9c5120f3cc6f2b5fef8e1d2a3fb7bbbb20ed4bfdc25bc8364bc62dcf54b",
- "sha256:e6b77ae84f2b8502d99a7855fa33334a1eb6159de45626905cb3e454c023f339",
- "sha256:e881ef610ff48aece2f4ee2af03d2db1a146dc7c705561bd6089b2356f61641f",
- "sha256:f41037260deaacb875db250021fe883bf536bf6414a4fd25b25059b02e31b120"
+ "sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6"
],
- "version": "==4.5.0"
+ "version": "==1.10.11"
}
}
}
#!/usr/bin/env python3
import time
+import asyncio
+
+import supcon.net
import supcon.intf
import supcon.core
import supcon.clone
-
-from twisted.internet import reactor
-from twisted.internet.endpoints import TCP4ServerEndpoint
+import supcon.cache
""" the interface com.example.Clock """
ComExampleClockIntf = supcon.intf.DInterface.load({
{
'name': 'tick',
'args': [
- { 'name': 'time', 'description': 'The current time' }
+ {'name': 'time', 'description': 'The current time'}
],
'description': 'Fires every second'
}
""" an implementation of the interface com.example.Clock """
class ComExampleClockImpl(supcon.intf.Implementation):
- def __init__(self, reactor):
+ def __init__(self, loop: asyncio.AbstractEventLoop):
super().__init__(ComExampleClockIntf)
- self.__reactor = reactor
- reactor.callLater(1, self.__tick)
+ self.__loop = loop
+ self.__loop.call_later(1, self.__tick)
def __tick(self):
t = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())
- self.fire('tick', { 'time': t })
- self.__reactor.callLater(1, self.__tick)
+ self.fire('tick', {'time': t})
+ self.__loop.call_later(1, self.__tick)
+
+loop = asyncio.get_event_loop()
""" create the implementation instanz """
-clockImpl = ComExampleClockImpl(reactor)
+clockImpl = ComExampleClockImpl(loop)
""" create the local node """
local = supcon.core.Node('clock-test')
else:
local.register('/clock5', clockImpl)
register.registered = not register.registered
- reactor.callLater(5, register)
+ loop.call_later(5, register)
register.registered = False
register()
local.register('/clock', clockImpl)
-supcon.clone.Cloner(local, '/clone5', local.name, '/clock5', 'com.example.Clock')
+cloner = supcon.clone.Cloner(local, '/clone5', local.name, '/clock5', 'com.example.Clock')
+cloner.setup()
+
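+""" cache tick events so late subscribers can replay missed ones (see supcon.cache) """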
+cache = supcon.cache.MemoryLocalEventCache(local)
+cache.watch('/clock5', 'com.example.Clock', 'tick')
+local.register('/cache', cache)
-local.listen(TCP4ServerEndpoint(reactor, 8123))
+local.listen(supcon.net.TCPServerEndpoint('localhost', 8123))
-reactor.run()
+loop.run_forever()
#!/usr/bin/env python3
-import supcon.core
+import asyncio
-from twisted.internet import reactor
-from twisted.internet.endpoints import TCP4ClientEndpoint
+import supcon.net
+import supcon.core
+import supcon.cache
local = 'listen'
local = supcon.core.Node(local)
-printArgs = lambda *args: print(', '.join([ '{}'.format(i) for i in args ]))
+printArgs = lambda *args: print(', '.join(['{}'.format(i) for i in args]))
pluckArgs = lambda mapping, keys: (mapping[key] for key in keys)
+handles = {}
+
+def subscribe(node, path, intf, event):
+ if (node, path, intf, event) not in handles:
+ handles[(node, path, intf, event)] = local.on(node, path, intf, event, printArgs)
+
+def unsubscribe(node, path, intf, event):
+ if (node, path, intf, event) in handles:
+ local.off(handles[(node, path, intf, event)])
+ del handles[(node, path, intf, event)]
+
def onIntf(args, event):
(_Node, _Path, _Intf, eEvent) = event
(aNode, aPath, aInterface) = pluckArgs(args, ['node', 'path', 'intf'])
aInterface = supcon.intf.DInterface.load(aInterface)
for aEvent in aInterface.events:
- reg = local.on if eEvent == 'intf' else local.off
- reg(aNode, aPath, aInterface.name, aEvent, printArgs)
+ reg = subscribe if eEvent == 'intf' else unsubscribe
+ reg(aNode, aPath, aInterface.name, aEvent)
local.on(local.name, '/', 'supcon.Local', 'intf', onIntf)
local.on(local.name, '/', 'supcon.Local', 'intfLost', onIntf)
local.on(local.name, '/', 'supcon.Local', 'intf', printArgs)
local.on(local.name, '/', 'supcon.Local', 'intfLost', printArgs)
-local.connect(TCP4ClientEndpoint(reactor, 'localhost', 8123))
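+""" replay cached ticks from the clock-test node, then keep following new ones """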
+cache = supcon.cache.MemoryRemoteEventCache(local, 'clock-test', '/cache')
+cache.setup()
+cache.on(('/clock5', 'com.example.Clock', 'tick'), lambda *args, **kwargs: print('CACHE', args, kwargs))
+
+closer = local.connect(supcon.net.TCPClientEndpoint('localhost', 8123))
-reactor.run()
+loop = asyncio.get_event_loop()
+loop.run_forever()
install_requirements = [
'msgpack-python',
- 'twisted',
'pigpio',
- 'pyopenssl',
- 'service-identity',
]
development_requirements = [
'pdoc',
--- /dev/null
+'''
+An EventCache provides a cache for locally sourced events on a supcon bus
+'''
+
+import os
+import asyncio
+
+from typing import Sequence, Callable
+
+import msgpack
+
+import supcon.core
+import supcon.intf
+import supcon.util
+
+EventCacheIntf = supcon.intf.DInterface.load({
+ 'name': 'supcon.EventCache',
+ 'events': [{
+ 'name': 'event',
+ 'args': [
+ {'name': 'number'},
+ {'name': 'path'},
+ {'name': 'intf'},
+ {'name': 'name'},
+ {'name': 'args'},
+ ],
+ }],
+ 'methods': [{
+ 'name': 'events',
+ 'inArgs': [
+ {'name': 'next', 'description': 'the number of next event expected by the caller'}
+ ],
+ 'outArgs': [
+ {'name': 'last', 'description': 'the number of the last event in the cache'},
+ {'name': 'first', 'description': 'the number of the first event in the cache'},
+ {'name': 'events', 'description': 'a list of number, path, intf, event, args tuples'},
+ ],
+ }]
+})
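+# Illustrative sketch of the 'events' method defined above (the concrete numbers are
+# made up for illustration): a consumer passes the number of the next event it expects
+# and receives the range the cache still holds plus the cached events from that number on.
+#
+#   outArgs = await local.call(node, path, EventCacheIntf.name, 'events', {'next': 3})
+#   # outArgs == {'last': 5, 'first': 0,
+#   #             'events': [(3, path, intf, name, args), (4, ...), (5, ...)]}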
+
+class LocalEventCache(supcon.intf.Implementation):
+ '''
+ The LocalEventCache caches local bus events and implements EventCacheIntf to export new and cached
+ events on the bus.
+ '''
+
+ def __init__(self, local: supcon.intf.Node, loadEvents: Callable, saveEvents: Callable):
+ supcon.intf.Implementation.__init__(self, EventCacheIntf)
+
+ self.__local = local
+ self.__loadEvents = loadEvents
+ self.__saveEvents = saveEvents
+
+ self.__watches = {}
+ self.__events = self.__loadEvents()
+
+ self.setCallCb('events', self.__onEventsCall)
+
+ def watch(self, path: str, intf: str, name: str):
+ '''
+ Watch for local events defined by path, intf and name
+ '''
+ key = (path, intf, name)
+ if key in self.__watches:
+ return
+ self.__watches[key] = self.__local.on(self.__local.name, path, intf, name, self.__onEvent)
+
+ def teardown(self):
+ '''
+ Removes all watches
+ '''
+ for handle in self.__watches.values():
+ self.__local.off(handle)
+ self.__watches.clear()
+
+ def __onEvent(self, args, event):
+ number = 0
+ if self.__events:
+ number = self.__events[-1][0] + 1
+
+ _, path, intf, name = event
+ self.__events.append((number, path, intf, name, args))
+ self.__events = self.__saveEvents(self.__events)
+
+ self.fire('event', {'number': number, 'path': path, 'intf': intf, 'name': name, 'args': args})
+
+ def __onEventsCall(self, next=None):
+ last = None
+ first = None
+ if self.__events:
+ last = self.__events[-1][0]
+ first = self.__events[0][0]
+
+    index = 0
+    if self.__events and next is not None and next >= first:
+      index = next - first
+    return {'last': last, 'first': first, 'events': self.__events[index:]}
+
+
+def MemoryLocalEventCache(local: supcon.intf.Node, maxEvents=1000) -> LocalEventCache:
+ '''
+ Creates a LocalEventCache that stores the cached events in memory.
+ '''
+
+ def _loadEvents() -> Sequence:
+ return []
+
+ def _saveEvents(events: Sequence) -> Sequence:
+ if len(events) > maxEvents:
+ events = events[-maxEvents:]
+ return events
+
+ return LocalEventCache(local, _loadEvents, _saveEvents)
+
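+# Typical wiring of the in-memory variant above, on the node that produces the events
+# (this mirrors the clock example updated in this change; the paths and names are the
+# ones used there):
+#
+#   cache = supcon.cache.MemoryLocalEventCache(local)
+#   cache.watch('/clock5', 'com.example.Clock', 'tick')
+#   local.register('/cache', cache)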
+
+def MsgpackLocalEventCache(local: supcon.intf.Node, filename: str, maxEvents=1000) \
+ -> LocalEventCache:
+ '''
+ Creates a LocalEventCache that stores the cached events in the filesystem.
+ '''
+ def _loadEvents() -> Sequence:
+ try:
+ with open(filename, 'rb') as fp:
+ return msgpack.unpackb(fp.read(), raw=False)
+ except FileNotFoundError:
+ return []
+
+ def _saveEvents(events: Sequence) -> Sequence:
+ if len(events) > maxEvents:
+ events = events[-maxEvents:]
+
+ with open(filename + '.wrt', 'wb') as fp:
+ fp.write(msgpack.packb(events, use_bin_type=True))
+ try:
+ os.remove(filename)
+ except FileNotFoundError:
+ pass
+ os.rename(filename + '.wrt', filename)
+ return events
+
+ return LocalEventCache(local, _loadEvents, _saveEvents)
+
+
+class RemoteEventCache(supcon.util.EventEmitterMixin):
+ '''
+  The RemoteEventCache connects to an Implementation of EventCacheIntf. It first emits cached
+  events that have not been emitted locally and then continues to watch for new events.
+ '''
+
+ def __init__(self, local: supcon.intf.Node, node: str, path: str, \
+ loadNext: Callable, saveNext: Callable, loop: asyncio.AbstractEventLoop = None):
+
+ self.__local = local
+ self.__node = node
+ self.__path = path
+ self.__loadNext = loadNext
+ self.__saveNext = saveNext
+
+ self.__loop = loop if loop else asyncio.get_event_loop()
+
+ self.__intfH = None
+ self.__eventH = None
+
+ self.__next = self.__loadNext()
+
+ def setup(self):
+ ''' Starts watching for events on the bus '''
+ if self.__intfH:
+ return
+
+ self.__intfH = self.__local.on(self.__local.name, '/', supcon.core.LocalIntf.name, 'intf', self.__onIntf)
+ self.__eventH = self.__local.on(self.__node, self.__path, EventCacheIntf.name, 'event', self.__onEvent)
+
+ if self.__local.nodes().hasIntf(self.__node, self.__path, EventCacheIntf.name):
+ self.__loop.create_task(self.__fetchEvents())
+
+ def teardown(self):
+ ''' Stops watching for events on the bus '''
+ if not self.__intfH:
+ return
+
+ self.__local.off(self.__intfH)
+ self.__intfH = None
+ self.__local.off(self.__eventH)
+ self.__eventH = None
+
+ def __onIntf(self, args, _):
+ if args['node'] == self.__node and args['path'] == self.__path and args['intf'] == EventCacheIntf.name:
+ self.__loop.create_task(self.__fetchEvents())
+
+ def __onEvent(self, args, _):
+ if args['number'] == self.__next:
+ self.__next += 1
+ self.__saveNext(self.__next)
+ self._emit((args['path'], args['intf'], args['name']), **args['args'])
+ else:
+ self.__loop.create_task(self.__fetchEvents())
+
+ async def __fetchEvents(self):
+ method = (self.__node, self.__path, EventCacheIntf.name, 'events')
+ inArgs = {'next': self.__next}
+ outArgs = await self.__local.call(*method, inArgs)
+
+    if outArgs['last'] is None:
+      # the remote cache is still empty, nothing to replay yet
+      return
+
+    if outArgs['first'] > self.__next:
+ self._emit('missing', amount=outArgs['first'] - self.__next)
+ self.__next = outArgs['first']
+
+ for number, path, intf, event, args in outArgs['events']:
+ if number < self.__next:
+ continue
+ if number > self.__next:
+ break
+
+ self.__next += 1
+ self.__saveNext(self.__next)
+ self._emit((path, intf, event), **args)
+
+ if self.__next - 1 < outArgs['last']:
+ self.__loop.create_task(self.__fetchEvents())
+
+
+def MemoryRemoteEventCache(local: supcon.intf.Node, node: str, path: str,
+ loop: asyncio.AbstractEventLoop = None) -> RemoteEventCache:
+ '''
+ Creates a RemoteEventCache that stores the sequence number of the last observed event in memory.
+ '''
+
+ def _loadNext() -> int:
+ return 0
+
+ def _saveNext(value: int):
+ pass
+
+ return RemoteEventCache(local, node, path, _loadNext, _saveNext, loop)
+
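+# Typical wiring of the in-memory variant above, on a consuming node (mirrors the
+# listen example updated in this change; 'clock-test' and '/cache' are the names
+# used there):
+#
+#   cache = supcon.cache.MemoryRemoteEventCache(local, 'clock-test', '/cache')
+#   cache.setup()
+#   cache.on(('/clock5', 'com.example.Clock', 'tick'),
+#            lambda *args, **kwargs: print('tick', args, kwargs))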
+
+def MsgpackRemoteEventCache(local: supcon.intf.Node, node: str, path: str, filename: str,
+ loop: asyncio.AbstractEventLoop = None) -> RemoteEventCache:
+ '''
+ Creates a RemoteEventCache that stores the sequence number of the last observed event in the
+ filesystem.
+ '''
+
+ def _loadNext() -> int:
+ try:
+ with open(filename, 'rb') as fp:
+ return msgpack.unpackb(fp.read(), raw=False)
+ except FileNotFoundError:
+ return 0
+
+ def _saveNext(value: int):
+ with open(filename + '.wrt', 'wb') as fp:
+ fp.write(msgpack.packb(value, use_bin_type=True))
+ try:
+ os.remove(filename)
+ except FileNotFoundError:
+ pass
+ os.rename(filename + '.wrt', filename)
+ return True
+
+ return RemoteEventCache(local, node, path, _loadNext, _saveNext, loop)
import supcon.intf
+__all__ = ('Cloner',)
+
class _Clone(supcon.intf.Implementation):
def __init__(self, local, rnode, rpath, interface):
self.__rnode = rnode
self.__rpath = rpath
- for event in self.interface.events:
- self.__local.on(self.__rnode, self.__rpath, self.intf, event, self.__on)
+ self.__handles = []
- def __del__(self):
+ def setup(self):
for event in self.interface.events:
- self.__local.off(self.__rnode, self.__rpath, self.intf, event, self.__on)
+ handle = self.__local.on(self.__rnode, self.__rpath, self.intf, event, self.__on)
+ self.__handles.append(handle)
+
+ def teardown(self):
+ for handle in self.__handles:
+ self.__local.off(handle)
+ self.__handles.clear()
def __on(self, args: dict, event):
(_Node, _Path, _Intf, eEvent) = event
#print('_Clone.call', method, inArgs)
return self.__local.call(self.__rnode, self.__rpath, self.intf, method, inArgs)
+
class Cloner(object):
def __init__(self, local: supcon.intf.Node, lPath: str, rNode: str, rPath: str, rIntf: str):
self.__rPath = rPath
self.__rIntf = rIntf
- self.__impl = None
-
- self.__local.on(self.__local.name, '/', 'supcon.Local', 'intf', self.__onIntf)
- self.__local.on(self.__local.name, '/', 'supcon.Local', 'intfLost', self.__onIntf)
+ self.__hIntf = None
+ self.__hIntfLost = None
- nodes = local.nodes()
- if nodes.hasIntf(rNode, rPath, rIntf):
- self.__setupImpl(nodes[rNode][rPath][rIntf])
+ self.__impl = None
@property
def local(self) -> supcon.intf.Node:
def rIntf(self) -> str:
return self.__rIntf
+ def setup(self):
+ if self.__hIntf:
+ return
+
+ self.__hIntf = self.__local.on(self.__local.name, '/', 'supcon.Local', 'intf', self.__onIntf)
+ self.__hIntfLost = self.__local.on(self.__local.name, '/', 'supcon.Local', 'intfLost', self.__onIntf)
+
+ nodes = self.__local.nodes()
+ if nodes.hasIntf(self.__rNode, self.__rPath, self.__rIntf):
+ self.__setupImpl(nodes[self.__rNode][self.__rPath][self.__rIntf])
+
+ def teardown(self):
+ if not self.__hIntf:
+ return
+
+ self.__local.off(self.__hIntf)
+ self.__hIntf = None
+
+ self.__local.off(self.__hIntfLost)
+ self.__hIntfLost = None
+
+ if self.__impl:
+ self.__teardownImpl()
+
def __onIntf(self, args, event):
if args['node'] != self.__rNode or args['path'] != self.__rPath:
return
def __setupImpl(self, interface):
#print('Cloner.__setupImpl')
self.__impl = _Clone(self.__local, self.__rNode, self.__rPath, interface)
+ self.__impl.setup()
+
self.__local.register(self.__lPath, self.__impl)
def __teardownImpl(self):
self.__local.unregister(self.__lPath, self.__impl)
+
+ self.__impl.teardown()
self.__impl = None
import uuid
import struct
-import msgpack
+import asyncio
-import twisted.internet.defer as defer
-import twisted.internet.protocol
-import twisted.internet.endpoints
+from typing import Mapping, Hashable, Callable, Any
-import twisted.application.service
-import twisted.application.internet
+import msgpack
+import supcon.net
import supcon.intf
+import supcon.util
+
+
+__all__ = [
+ 'LocalIntf', 'RemoteIntf', 'Node'
+]
_printArgs = lambda *args: print(', '.join(['{}'.format(i) for i in args]))
})
-class _Protocol(twisted.internet.protocol.Protocol):
+class _Protocol(asyncio.Protocol):
"""This class handels handshake and message encoding between nodes on the bus.
This class is just an implementation detail of the class Node.
"""
- def __init__(self):
+ def __init__(self, factory, loop: asyncio.AbstractEventLoop = None):
super().__init__()
- self.factory = None
+
+ self.__factory = factory
+ self.__loop = loop if loop else asyncio.get_event_loop()
self.__data = bytearray()
- self.__name = None
- def connectionMade(self):
+ self.__name = None # Type: str
+ self.__transport = None # Type: asyncio.Transport
+
+ self.__inactiveTimer = None
+
+ def connection_made(self, transport):
+ self.__transport = transport
self.sendMesg({
'type': 'handshake',
- 'name': self.factory.name
+ 'name': self.__factory.name
})
- def connectionLost(self, reason):
+ self.__cancelInactive()
+ self.__inactiveTimer = self.__loop.call_later(5, self.__pingOnInactive)
+
+ def connection_lost(self, exc):
+ self.__cancelInactive()
+
if not self.__name:
return
- self.factory.delNode(self.__name)
+ self.__factory.delNode(self.__name)
- def dataReceived(self, data):
+ def data_received(self, data):
self.__data.extend(data)
while len(self.__data) > 4:
try:
mesg = msgpack.unpackb(mesg, encoding='utf-8')
except BaseException as _:
- self.transport.loseConnection()
+ self.__transport.close()
else:
self.recvMesg(mesg)
def recvMesg(self, mesg):
+ self.__cancelInactive()
+ self.__inactiveTimer = self.__loop.call_later(5, self.__pingOnInactive)
+
if not self.__name:
if mesg['type'] != 'handshake':
- self.transport.loseConnection()
+ self.__transport.close()
return
self.__name = mesg['name']
- self.factory.addNode(self.__name, self)
+ self.__factory.addNode(self.__name, self)
+ elif mesg['type'] == 'ping':
+ self.sendMesg({'type': 'pong'})
+ elif mesg['type'] == 'pong':
+ pass
else:
- self.factory.recvMesg(self.__name, mesg)
+ self.__factory.recvMesg(self.__name, mesg)
def sendMesg(self, mesg):
data = msgpack.packb(mesg, use_bin_type=True)
size = struct.pack('<L', len(data))
- self.transport.write(size)
- self.transport.write(data)
+ self.__transport.write(size)
+ self.__transport.write(data)
+
+ def __cancelInactive(self):
+ if self.__inactiveTimer:
+ self.__inactiveTimer.cancel()
+ self.__inactiveTimer = None
+
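+  # Keepalive sketch: after 5 seconds without any received message a 'ping' is sent;
+  # if another 5 seconds pass without a reply (a 'pong' or any other message) the
+  # connection is closed. recvMesg resets the timer on every incoming message.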
+ def __pingOnInactive(self):
+ self.__cancelInactive()
+ self.sendMesg({'type': 'ping'})
+ self.__inactiveTimer = self.__loop.call_later(5, self.__closeOnInactive)
+
+ def __closeOnInactive(self):
+ self.__cancelInactive()
+ self.__transport.close()
-class _Factory(twisted.internet.protocol.Factory):
+
+class _Factory(object):
"""This class creates _Protocol instances for connections to remote nodes.
This class is just an implementation detail of the class Node and encapsulates
methods to create the _Protocol instances resulting from calls to
Node.connect() and Node.listen().
"""
- protocol = _Protocol
-
- def __init__(self, name, addNode, delNode, recvMesg):
+ def __init__(self, name, addNode, delNode, recvMesg, loop: asyncio.AbstractEventLoop = None):
self.name = name
self.addNode = addNode
self.delNode = delNode
self.recvMesg = recvMesg
+ self.__loop = loop if loop else asyncio.get_event_loop()
+
+ def createProtocol(self):
+ return _Protocol(self, self.__loop)
+
class _KnownMgr(object):
del self.__keys[node]
-class _DeferredMgr(object):
- """ This class manages Deferred """
+class _FutureMgr(object):
+ """ This class manages Futures """
def __init__(self):
self.__infos = {}
- def create(self, data=None) -> (str, defer.Deferred):
- """Creates a Deferred with an info.
+ def create(self, data=None) -> (str, asyncio.Future):
+ """Creates a Future with additional data.
Args:
data: Some additional data
Returns:
- (str, defer.Deferred): A Tuple of an unique id and a Deferred
+      (str, asyncio.Future): A tuple of a unique id and a Future
"""
pid = str(uuid.uuid4())
- def canceller(_d):
- del self.__infos[pid]
+ def doneCallback(f: asyncio.Future):
+ if f.cancelled():
+ del self.__infos[pid]
+
+ f = asyncio.Future()
+ f.add_done_callback(doneCallback)
- d = defer.Deferred(canceller)
- self.__infos[pid] = (d, data)
+ self.__infos[pid] = (f, data)
- return (pid, d)
+ return (pid, f)
def succeed(self, value, pid):
- """Succeeds the Deferred identified by the given unique id with the given
+ """Succeeds the Future identified by the given unique id with the given
response.
Args:
- pid (str): A unique id of a Deferred created with _DeferredMgr.create()
- value (mixed): The value to succeed the Deferred with
+ pid (str): A unique id of a Future created with _FutureMgr.create()
+ value (mixed): The value to succeed the Future with
"""
if pid not in self.__infos:
return
- d = self.__infos[pid][0]
+ f = self.__infos[pid][0]
del self.__infos[pid]
- d.callback(value)
+ f.set_result(value)
def fail(self, reason, pid):
"""Fail the Deferred identified by the given unique id with the given
if pid not in self.__infos:
return
- d = self.__infos[pid][0]
+ f = self.__infos[pid][0]
del self.__infos[pid]
- d.errback(reason)
+ f.set_exception(reason)
def failAll(self, reason, predicate):
- """Fail all Deferred for which predicate(data) returns true.
+ """Fail all Futures for which predicate(data) returns true.
Args:
reason (Exception): The reason to fail the Futures with
for pid, info in self.__infos.copy().items():
if predicate(pid, info[1]):
del self.__infos[pid]
- info[0].errback(reason)
+ info[0].set_exception(reason)
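+
+ # Illustrative usage sketch for _FutureMgr (hypothetical values, assumes a running event loop):
+ #
+ # mgr = _FutureMgr()
+ # pid, fut = mgr.create(data=('nodeA', '/path', 'some.Intf', 'method'))
+ # ... send the request tagged with pid ...
+ # mgr.succeed({'answer': 42}, pid) # resolves fut with {'answer': 42}
+ # mgr.fail(RuntimeError('timeout'), pid) # alternatively: rejects fut with the given exception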
class _CallbackMgr(object):
def __init__(self):
- self.__cbs = {}
-
- def on(self, key, cb):
+ self.__events = {}
+ self.__callbacks = {}
+
+ def on(self, event: Hashable, callback: Callable) -> (bool, Hashable):
+ '''
+ Registers the given callback for the given event. Returns a tuple (bool, Hashable). The first
+ element denotes whether the given callback was the first callback registered for the given event.
+ The second element is a handle that can be passed to off() to unregister the callback again.
+ '''
first = False
- if key not in self.__cbs:
- self.__cbs[key] = {}
+ if event not in self.__callbacks:
+ self.__callbacks[event] = {}
first = True
- self.__cbs[key][id(cb)] = cb
- return first
- def off(self, key, cb):
- if key not in self.__cbs or id(cb) not in self.__cbs[key]:
- return False
- del self.__cbs[key][id(cb)]
- if not self.__cbs[key]:
- del self.__cbs[key]
- return True
- return False
+ handle = uuid.uuid4()
+ while handle in self.__events:
+ handle = uuid.uuid4()
+
+ self.__events[handle] = event
+ self.__callbacks[event][handle] = callback
+
+ return (first, handle)
+
+ def off(self, handle: Hashable) -> (bool, Hashable):
+ if handle not in self.__events:
+ return (False, None)
+ event = self.__events[handle]
+
+ del self.__events[handle]
+ del self.__callbacks[event][handle]
- def fire(self, key, args):
- #_printArgs('_CallbackMgr.fire', key, args)
- if key not in self.__cbs:
+ last = not bool(self.__callbacks[event])
+ if last:
+ del self.__callbacks[event]
+
+ return last, event
+
+ def fire(self, event: Hashable, args: Mapping[str, Any]):
+ if event not in self.__callbacks:
return
- for cb in self.__cbs[key].values():
+ for callback in self.__callbacks[event].copy().values():
try:
- cb(args, key)
+ callback(args, event)
except BaseException as _:
traceback.print_exc()
- def keys(self, predicate):
- return [key for key in self.__cbs if predicate(key)]
+ def events(self, predicate = None):
+ if not predicate:
+ return self.__callbacks.keys()
+ return [event for event in self.__callbacks if predicate(event)]
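+
+ # Illustrative usage sketch for _CallbackMgr (hypothetical event key and callback):
+ #
+ # mgr = _CallbackMgr()
+ # key = ('nodeA', '/path', 'some.Intf', 'changed')
+ # first, handle = mgr.on(key, lambda args, event: print(args)) # first is True for a new event
+ # mgr.fire(key, {'value': 1}) # calls the callback with ({'value': 1}, key)
+ # last, event = mgr.off(handle) # last is True once no callback is left for key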
class Node(supcon.intf.Node):
- def __init__(self, name):
+ def __init__(self, name, loop: asyncio.AbstractEventLoop = None):
super().__init__(name)
+ self.__loop = loop if loop else asyncio.get_event_loop()
+
self.__impls = {}
self.__protocols = {}
- self.__factory = _Factory(self.name, self.__addNode, self.__delNode, self.__recvMesg)
-
- self.__service = twisted.application.service.MultiService()
- self.__service.startService()
+ self.__factory = _Factory(self.name, self.__addNode, self.__delNode, self.__recvMesg, self.__loop)
fire = lambda event, args: \
self.__fireEventLocal(self.name, '/', LocalIntf.name, event, args)
self.__knownMgr = _KnownMgr(fire)
+ self.__futureMgr = _FutureMgr()
self.__informMgr = _InformMgr()
self.__callbackMgr = _CallbackMgr()
- self.__deferredMgr = _DeferredMgr()
self.__knownMgr.addNode(self.name)
- def connect(self, endpoint):
- service = twisted.application.internet.ClientService(endpoint, self.__factory)
- self.__service.addService(service)
+ def connect(self, endpoint: supcon.net.ClientEndpoint) -> supcon.net.Closer:
+ return supcon.net.ReconnectingClientEndpoint(endpoint).connect(self.__factory.createProtocol)
- def listen(self, endpoint):
- endpoint.listen(self.__factory)
+ def listen(self, endpoint: supcon.net.ServerEndpoint) -> supcon.net.Closer:
+ return endpoint.listen(self.__factory.createProtocol)
def __addNode(self, node: str, protocol: _Protocol):
if node in self.__protocols:
self.__knownMgr.addNode(node)
# reestablish existing callbacks by calls to supcon.Remote.on
- for key in self.__callbackMgr.keys(lambda key: key[0] == node):
+ for event in self.__callbackMgr.events(lambda event: event[0] == node):
self.__callRemote(node, '/', RemoteIntf.name, 'on', {
- 'path': key[1],
- 'intf': key[2],
- 'event': key[3]
+ 'path': event[1],
+ 'intf': event[2],
+ 'event': event[3]
})
- self.__callRemote(node, '/', RemoteIntf.name, 'node', {}).addCallback(
- lambda args: self.__knownMgr.setNode(supcon.intf.DNode.load(args['node']))
- ).addErrback(_printArgs)
+ def doneCallback(f: asyncio.Future):
+ e = f.exception()
+ if e:
+ print(e)
+ else:
+ r = f.result()
+ self.__knownMgr.setNode(supcon.intf.DNode.load(r['node']))
+
+ self.__callRemote(node, '/', RemoteIntf.name, 'node', {}).add_done_callback(doneCallback)
return True
reason = RuntimeError('node {} lost'.format(node))
predicate = lambda pid, data: data[0] == node
- self.__deferredMgr.failAll(reason, predicate)
+ self.__futureMgr.failAll(reason, predicate)
return True
self.__knownMgr.delIntf(self.name, path, impl.interface)
self.__broadcastEvent('intfLost', {'path': path, 'intf': impl.interface.dump()})
- def on(self, node, path, intf, event, cb):
- if self.__callbackMgr.on((node, path, intf, event), cb):
- if node != self.name and node in self.__protocols:
- self.__callRemote(node, '/', RemoteIntf.name, 'on', {
- 'path': path, 'intf': intf, 'event': event
- })
-
- def off(self, node, path, intf, event, cb):
- if self.__callbackMgr.off((node, path, intf, event), cb):
- if node != self.name and node in self.__protocols:
- self.__callRemote(node, '/', RemoteIntf.name, 'off', {
- 'path': path, 'intf': intf, 'event': event
- })
-
- def call(self, node, path, intf, method, args) -> defer.Deferred:
- # TODO: don't raise Exceptions
+ def on(self, node: str, path: str, intf: str, event: str, cb: Callable) -> Hashable:
+ first, handle = self.__callbackMgr.on((node, path, intf, event), cb)
+
+ if first and node != self.name and node in self.__protocols:
+ self.__callRemote(node, '/', RemoteIntf.name, 'on', {
+ 'path': path, 'intf': intf, 'event': event
+ })
+
+ return handle
+
+ def off(self, handle: Hashable) -> bool:
+ """Unregisters a callback for an event on the bus"""
+ last, event = self.__callbackMgr.off(handle)
+
+ if last and event[0] != self.name and event[0] in self.__protocols:
+ self.__callRemote(event[0], '/', RemoteIntf.name, 'off', {
+ 'path': event[1], 'intf': event[2], 'event': event[3]
+ })
+
+ return event is not None
+
+ async def call(self, node: str, path: str, intf: str, method: str, inArgs: Mapping[str, Any] = None) -> Mapping[str, Any]:
assert path != '/' or intf != RemoteIntf.name, \
'unable to call: method {} of interface {} at path /'.format(method, RemoteIntf.name)
if node == self.name:
if path == '/' and intf == LocalIntf.name:
- return self.__callLocal(method, args)
- return self.__callImpl(path, intf, method, args)
- return self.__callRemote(node, path, intf, method, args)
+ return await self.__callLocal(method, inArgs)
+ return await self.__callImpl(path, intf, method, inArgs)
+ return await self.__callRemote(node, path, intf, method, inArgs)
+
+ async def __callImpl(self, path, intf, method, args):
+ '''Dispatches a method call to a local implementation.'''
- def __callImpl(self, path, intf, method, args) -> defer.Deferred:
- # TODO: don't raise Exception
assert path != '/' or intf != LocalIntf.name, \
'unable to call impl: method {} of interface {} at path /'.format(method, LocalIntf.name)
assert path != '/' or intf != RemoteIntf.name, \
'unable to call impl: method {} of interface {} at path /'.format(method, RemoteIntf.name)
- try:
- if path not in self.__impls:
- raise ValueError('unknown path') # TODO: nicer error message
- if intf not in self.__impls[path]:
- raise ValueError('unknown intf') # TODO: nicer error message
- if method not in self.__impls[path][intf]['impl'].interface.methods:
- raise ValueError('unknown method') # TODO: nicer error message
-
- d = self.__impls[path][intf]['impl'].call(method, args)
- if not isinstance(d, defer.Deferred):
- d = defer.succeed(d)
- except BaseException as e:
- d = defer.fail(e)
+ if path not in self.__impls:
+ mesg = 'unknown path {} on node {}'.format(path, self.name)
+ raise RuntimeError(mesg)
+ if intf not in self.__impls[path]:
+ mesg = 'unknown intf {} at path {} on node {}'.format(intf, path, self.name)
+ raise RuntimeError(mesg)
+ if method not in self.__impls[path][intf]['impl'].interface.methods:
+ mesg = 'unknown method {} of intf {} at path {} on node {}'.format(method, intf, path, self.name)
+ raise RuntimeError(mesg)
- return d
+ r = self.__impls[path][intf]['impl'].call(method, args)
+ if asyncio.iscoroutine(r):
+ r = await r
+
+ return r
+
+ def __callLocal(self, method, args) -> asyncio.Future:
+ '''Dispatches a method call to the LocalIntf implementation.'''
- def __callLocal(self, method, args) -> defer.Deferred:
- # TODO: don't raise Exception
assert method in LocalIntf.methods, \
- '{} is not an method of interface {} at path /'.format(method, RemoteIntf.name)
+ '{} is not a method of interface {} at path /'.format(method, LocalIntf.name)
+
LocalIntf.methods[method].validateInArgs(args)
+ f = asyncio.Future()
if method == 'nodes':
- return defer.succeed({'nodes': self.__knownMgr.getNodes().dump()})
+ f.set_result({'nodes': self.__knownMgr.getNodes().dump()})
+ else:
+ f.set_exception(RuntimeError('method {} not yet implemented'.format(method)))
+
+ return f
- return defer.fail(RuntimeError('method {} not yet implemented'.format(method)))
+ def __callRemote(self, node, path, intf, method, args) -> asyncio.Future:
+ '''Dispatches a method call to a remote implementation.'''
- def __callRemote(self, node, path, intf, method, args) -> defer.Deferred:
- # TODO: don't raise Exception
assert path != '/' or intf != LocalIntf.name, \
'unable to call remote: method {} of interface {} at path /'.format(method, LocalIntf.name)
# TODO: validate args
- (cid, p) = self.__deferredMgr.create((node, path, intf, method))
+ (cid, f) = self.__futureMgr.create((node, path, intf, method))
self.__sendCall(node, path, intf, method, args, cid)
- return p
+ return f
def __fireImplEvent(self, path, intf, event, args):
+ '''Dispatches a locally fired event'''
+
assert path != '/' or intf != LocalIntf.name, \
'unable to fire impl: event {} of interface {} at path /'.format(event, RemoteIntf.name)
assert path != '/' or intf != RemoteIntf.name, \
self.__fireEventRemote(path, intf, event, args)
def __fireEventLocal(self, node: str, path: str, intf: str, event: str, args: dict):
+ '''Dispatches an event locally'''
+
assert path != '/' or intf != RemoteIntf.name, \
'unable to fire local: event {} of interface {} at path /'.format(event, RemoteIntf.name)
- #_printArgs('Node.__fireEventLocal', node, path, intf, event, args)
# TODO: validate args
'{} is not a method of interface {} at path /'.format(method, RemoteIntf.name)
RemoteIntf.methods[method].validateInArgs(args)
+ f = asyncio.Future()
if method == 'node':
- d = defer.succeed({'node': self.__knownMgr.getNode(self.name).dump()})
+ f.set_result({'node': self.__knownMgr.getNode(self.name).dump()})
elif method == 'on':
self.__informMgr.on(args['path'], args['intf'], args['event'], node)
- d = defer.succeed({})
+ f.set_result({})
elif method == 'off':
self.__informMgr.off(args['path'], args['intf'], args['event'], node)
- d = defer.succeed({})
+ f.set_result({})
else:
raise ValueError('method {} is not yet implemented'.format(method))
else:
- d = self.__callImpl(path, intf, method, args)
+ f = self.__callImpl(path, intf, method, args)
+
except BaseException as e:
traceback.print_exc()
- d = defer.fail(e)
+ f = asyncio.Future()
+ f.set_exception(e)
+
+ f = asyncio.ensure_future(f, loop=self.__loop)
+ def doneCallback(f: asyncio.Future):
+ e = f.exception()
+ if e:
+ self.__sendError(node, repr(e), cid)
+ else:
+ self.__sendResult(node, f.result(), cid)
- d.addCallbacks(
- lambda result: self.__sendResult(node, result, cid),
- lambda reason: self.__sendError(node, repr(reason), cid)
- ).addErrback(_printArgs)
+ f.add_done_callback(doneCallback)
def __recvError(self, _node: str, reason: str, cid: str):
try:
- self.__deferredMgr.fail(RuntimeError(reason), cid)
+ self.__futureMgr.fail(RuntimeError(reason), cid)
except BaseException as _:
traceback.print_exc()
def __recvResult(self, _node: str, result: dict, cid: str):
try:
- self.__deferredMgr.succeed(result, cid)
+ self.__futureMgr.succeed(result, cid)
except BaseException as _:
traceback.print_exc()
reason = RuntimeError('interface {} at path {} on node {} lost'.format(interface.name, path, node))
predicate = lambda pid, data: data[0] == node and data[1] == path and data[2] == interface.name
- self.__deferredMgr.failAll(reason, predicate)
+ self.__futureMgr.failAll(reason, predicate)
else:
raise ValueError('event {} not yet implemented'.format(event))
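+
+ # Illustrative end-to-end sketch (hypothetical names; the endpoints come from supcon.net and
+ # the calling code runs inside a coroutine):
+ #
+ # server = Node('server')
+ # server.listen(supcon.net.TCPServerEndpoint('127.0.0.1', 8700))
+ # server.register('/demo', someImplementation) # a supcon.intf.Implementation instance
+ #
+ # client = Node('client')
+ # client.connect(supcon.net.TCPClientEndpoint('127.0.0.1', 8700))
+ # result = await client.call('server', '/demo', 'some.Intf', 'someMethod', {})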
--- /dev/null
+
+import abc
+import asyncio
+
+from typing import Callable, Tuple, Any
+
+import supcon
+
+CounterIntf = supcon.intf.DInterface.load({
+ 'name': 'supcon.Counter',
+ 'events': [
+ {
+ 'name': 'count',
+ 'args': [
+ {'name': 'count', 'description': ''},
+ {'name': 'epoch', 'description': ''}
+ ],
+ },
+ {
+ 'name': 'reset',
+ 'args': [
+ {'name': 'count', 'description': ''},
+ {'name': 'epoch', 'description': ''}
+ ],
+ }
+ ],
+ 'methods': [
+ {
+ 'name': 'last',
+ 'outArgs': [
+ {'name': 'count', 'description': ''},
+ {'name': 'epoch', 'description': ''},
+ ]
+ },
+ {
+ 'name': 'current',
+ 'outArgs': [
+ {'name': 'count', 'description': ''},
+ {'name': 'epoch', 'description': ''},
+ ]
+ },
+ {
+ 'name': 'reset',
+ 'inArgs': [
+ {'name': 'epoch', 'description': ''},
+ ]
+ },
+ {
+ 'name': 'increment',
+ 'inArgs': [
+ {'name': 'amount', 'description': ''},
+ ]
+ }
+ ]
+})
+
+
+class CounterInterface(supcon.util.EventEmitterInterface):
+
+ @abc.abstractmethod
+ def last(self) -> Tuple[int, Any]:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def current(self) -> Tuple[int, Any]:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def reset(self, epoch: Any):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def increment(self, amount: int = 1) -> None:
+ raise NotImplementedError
+
+
+class CounterImpl(supcon.intf.Implementation):
+
+ def __init__(self):
+ supcon.intf.Implementation.__init__(self, CounterIntf)
+
+ self.__handles = []
+
+ self.__counter = None
+ self.__exportReset = None
+ self.__exportIncrement = None
+
+ self.setCallCb('last', self.__last)
+ self.setCallCb('current', self.__current)
+
+ self.setCallCb('reset', self.__reset)
+ self.setCallCb('increment', self.__increment)
+
+ def setup(self, counter: CounterInterface, exportReset=False, exportIncrement=False):
+ if self.__counter:
+ raise RuntimeError('CounterImpl already setup!')
+
+ self.__counter = counter
+ self.__exportReset = exportReset
+ self.__exportIncrement = exportIncrement
+
+ self.__handles.append(self.__counter.on('count', self.__onCount))
+ self.__handles.append(self.__counter.on('reset', self.__onReset))
+
+ def teardown(self):
+ if not self.__counter:
+ raise RuntimeError('CounterImpl not setup!')
+
+ for handle in self.__handles:
+ self.__counter.off(handle)
+ self.__handles.clear()
+
+ self.__counter = None
+
+ def __last(self):
+ return dict(zip(['count', 'epoch'], self.__counter.last()))
+
+ def __current(self):
+ return dict(zip(['count', 'epoch'], self.__counter.current()))
+
+ def __reset(self, epoch: Any) -> None:
+ if self.__exportReset:
+ return self.__counter.reset(epoch)
+ raise RuntimeError('method reset() is not exported')
+
+ def __increment(self, amount: int) -> None:
+ if self.__exportIncrement:
+ return self.__counter.increment(amount)
+ raise RuntimeError('method increment() is not exported')
+
+ def __onCount(self, count: int, epoch: Any) -> None:
+ self.fire('count', {'count': count, 'epoch': epoch})
+
+ def __onReset(self, count: int, epoch: Any) -> None:
+ self.fire('reset', {'count': count, 'epoch': epoch})
+
+
+class BaseCounter(CounterInterface, supcon.util.EventEmitterMixin):
+
+ def __init__(self, count: int = 0, epoch: Any = None, lastCount: int = 0, lastEpoch: Any = None):
+ self.__last = [lastCount, lastEpoch]
+ self.__current = [count, epoch]
+
+ def last(self):
+ return tuple(self.__last)
+
+ def current(self):
+ return tuple(self.__current)
+
+ def reset(self, epoch: Any) -> None:
+ self.__last = self.__current
+ self.__current = [0, epoch]
+ if self.__last[1]:
+ self._emit('reset', *self.__last)
+
+ def increment(self, amount: int = 1) -> None:
+ self.__current[0] += amount
+ self._emit('count', *self.__current)
+
+
+class EpochCounter(BaseCounter):
+
+ def __init__(self, getEpoch: Callable[[], Any], loop: asyncio.AbstractEventLoop = None):
+ BaseCounter.__init__(self, 0, getEpoch())
+
+ self.__getEpoch = getEpoch
+ self.__loop = loop if loop is not None else asyncio.get_event_loop()
+
+ self.__loop.call_soon(self.__onTick)
+
+ def __onTick(self):
+ _, oldEpoch = self.current()
+ newEpoch = self.__getEpoch()
+ if newEpoch != oldEpoch:
+ self.reset(newEpoch)
+
+ self.__loop.call_later(0.5, self.__onTick)
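+
+ # Illustrative wiring sketch (hypothetical node and path; time.time() as the epoch source):
+ #
+ # counter = EpochCounter(lambda: int(time.time() // 60)) # a new epoch every full minute
+ # impl = CounterImpl()
+ # impl.setup(counter, exportReset=True, exportIncrement=True)
+ # node.register('/counter', impl) # assuming an already constructed supcon node
+ # counter.increment() # fires 'count', which CounterImpl forwards onto the bus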
-
+import abc
import struct
+import asyncio
import msgpack
-import twisted.internet.defer as defer
-import twisted.internet.interfaces
-import twisted.internet.protocol
-
-class _Protocol(twisted.internet.protocol.Protocol):
-
- def __init__(self):
- super().__init__()
- self.factory = None
+__all__ = (
+ 'Endpoint', 'TCPEndpoint', 'UnixEndpoint', 'Connection'
+)
- self.__data = bytearray()
- self.__name = None
+class Endpoint(abc.ABC):
- def dataReceived(self, data):
- self.__data.extend(data)
+ @abc.abstractmethod
+ async def connect(self) -> (asyncio.StreamReader, asyncio.StreamWriter):
+ raise NotImplementedError()
- while len(self.__data) > 4:
- size = struct.unpack_from('<L', self.__data)[0]
- if len(self.__data) < size + 4:
- break
+class TCPEndpoint(Endpoint):
- mesg = self.__data[4:size + 4]
- self.__data = self.__data[size + 4:]
+ def __init__(self, host, port, ssl=False):
+ self.__host = host
+ self.__port = port
+ self.__ssl = ssl
- try:
- mesg = msgpack.unpackb(mesg)
- except BaseException as e:
- self.factory.close(e)
- else:
- self.factory.recv(mesg)
+ @property
+ def host(self):
+ return self.__host
- def send(self, mesg):
- data = msgpack.packb(mesg)
- size = struct.pack('<L', len(data))
+ @property
+ def port(self):
+ return self.__port
- self.transport.write(size)
- self.transport.write(data)
+ async def connect(self) -> (asyncio.StreamReader, asyncio.StreamWriter):
+ return await asyncio.open_connection(self.__host, self.__port, ssl=self.__ssl)
- def close(self):
- self.transport.loseConnection()
-class _Factory(twisted.internet.protocol.Factory):
- protocol = _Protocol
+class UnixEndpoint(Endpoint):
- def __init__(self, recv, close):
- self.__recv = recv
- self.__close = close
+ def __init__(self, path):
+ self.__path = path
@property
- def recv(self):
- return self.__recv
+ def path(self):
+ return self.__path
- @property
- def close(self):
- return self.__close
+ async def connect(self) -> (asyncio.StreamReader, asyncio.StreamWriter):
+ return await asyncio.open_unix_connection(self.__path)
class Connection(object):
- def __init__(self, point: twisted.internet.interfaces.IStreamClientEndpoint):
- self.__point = point
+ def __init__(self, ep: Endpoint):
+ self.__ep = ep
- self.__lock = defer.DeferredLock()
- self.__factory = _Factory(self.__recv, self.__close)
+ self.__lock = asyncio.Lock()
- self.__proto = None
self.__callid = 0
+ self.__reader = None
+ self.__writer = None
- self.__deferred = None
-
- def __del__(self):
- self.__close()
-
- @defer.inlineCallbacks
- def __open(self):
- self.__proto = yield self.__point.connect(self.__factory)
- yield self.__read()
+ async def __open(self):
+ self.__reader, self.__writer = await self.__ep.connect()
+ await self.__read()
- def __close(self, reason=None):
- if self.__proto:
- self.__proto.close()
- self.__proto = None
+ def __close(self):
+ if self.__writer:
+ self.__writer.close()
self.__callid = 0
+ self.__reader = None
+ self.__writer = None
- if self.__deferred:
- d = self.__deferred
- self.__deferred = None
- d.errback(reason)
+ async def __read(self):
+ size = await self.__reader.readexactly(4)
+ size = struct.unpack_from('<L', size)[0]
- def __read(self):
- if self.__deferred:
- raise RuntimeError('Already reading???')
- self.__deferred = defer.Deferred()
- return self.__deferred
+ mesg = await self.__reader.readexactly(size)
+ mesg = msgpack.unpackb(mesg, raw=False)
- def __recv(self, mesg):
- if not self.__deferred:
- raise RuntimeError('Not reading???')
- d = self.__deferred
- self.__deferred = None
- d.callback(mesg)
+ return mesg
def __write(self, mesg):
- self.__proto.send(mesg)
+ data = msgpack.packb(mesg, use_bin_type=True)
+ size = struct.pack('<L', len(data))
+
+ self.__writer.write(size)
+ self.__writer.write(data)
- @defer.inlineCallbacks
- def __call(self, method, params):
- yield self.__lock.acquire()
+ async def __call(self, method, params):
+ await self.__lock.acquire()
try:
if self.__callid == 0:
- yield self.__open()
+ await self.__open()
self.__callid += 1
req = {
- b'jsonrpc': b'2.0',
- b'method': method,
- b'params': params,
- b'id': self.__callid,
+ 'jsonrpc': '2.0',
+ 'method': method,
+ 'params': params,
+ 'id': self.__callid,
}
self.__write(req)
- res = yield self.__read()
+ res = await self.__read()
+ except:
+ self.__close()
+ raise
finally:
self.__lock.release()
- if b'jsonrpc' not in res or res[b'jsonrpc'] != req[b'jsonrpc']:
+ if 'jsonrpc' not in res or res['jsonrpc'] != req['jsonrpc']:
raise Exception('Not a JSON-RPC 2.0 response!')
- if b'error' in res:
- raise Exception('JSON-RPC: Remote error: {0}'.format(res[b'error']))
- if b'id' not in res or res[b'id'] != req[b'id']:
+ if 'error' in res:
+ raise Exception('JSON-RPC: Remote error: {0}'.format(res['error']))
+ if 'id' not in res or res['id'] != req['id']:
raise Exception('JSON-RPC id missing or invalid')
- return res[b'result']
+ return res['result']
def __getattr__(self, attr):
return lambda *args: self.__call(attr, args)
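+
+ # Illustrative usage sketch (hypothetical endpoint and remote method name):
+ #
+ # conn = Connection(TCPEndpoint('127.0.0.1', 4339))
+ # result = await conn.someRemoteMethod('arg1', 'arg2')
+ #
+ # __getattr__ turns the attribute access into a JSON-RPC 2.0 request that is serialized with
+ # msgpack and framed with a 4-byte little-endian length prefix by __write()/__read().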
import re
import abc
+import asyncio
import itertools
-import traceback
-import twisted.internet.defer as defer
+from typing import Callable, Sequence, Mapping, Any, Hashable
+import supcon.net
import supcon.util
+
+class DType(supcon.util.Dumpable):
+ dInteger = "I"
+ dString = "S"
+ dBytes = "U"
+ dFloat = "F"
+ dBool = "B"
+ dNone = "N"
+ dArray = "A"
+ dMap = "M"
+ dTuple = "("
+ dUnion = "["
+ dVariant = "V"
+
+ dBasic = (dInteger, dString, dBytes, dFloat, dBool, dNone)
+ dContainer = (dArray, dMap, dTuple)
+
+ __instances = {}
+
+ @classmethod
+ def new(cls, symbol: str, children: Sequence['DType'] = None):
+ key = (symbol, tuple(children) if children else None)
+ if key not in cls.__instances:
+ cls.__instances[key] = cls(symbol, children)
+ return cls.__instances[key]
+
+ def __init__(self, symbol: str, children: Sequence['DType'] = None):
+ children = tuple(children) if children else ()
+
+ if symbol in self.dBasic:
+ assert not children, 'primitive types must not have children'
+
+ elif symbol == self.dArray:
+ assert len(children) == 1, 'array type must have exactly one child'
+
+ elif symbol == self.dMap:
+ assert len(children) == 2, 'map type must have exactly two children'
+
+ elif symbol == self.dTuple:
+ assert len(children) > 1, 'tuple type must have at least two children'
+
+ elif symbol == self.dUnion:
+ assert len(children) > 1, 'union type must have at least two children'
+
+ elif symbol == self.dVariant:
+ assert not children, 'variant type must not have children'
+
+ else:
+ raise RuntimeError('unexpected type symbol')
+
+ concrete = False
+ if symbol in self.dBasic:
+ concrete = True
+ if symbol in self.dContainer:
+ concrete = True
+ for child in children:
+ concrete = concrete and child.concrete
+
+ signature = symbol + ''.join(child.signature for child in children)
+ if symbol == self.dTuple:
+ signature += ')'
+ if symbol == self.dUnion:
+ signature += ']'
+
+ self.__symbol = symbol
+ self.__children = children
+ self.__concrete = concrete
+ self.__signature = signature
+
+ @property
+ def symbol(self) -> str:
+ return self.__symbol
+
+ @property
+ def children(self) -> Sequence['DType']:
+ return self.__children
+
+ @property
+ def concrete(self) -> bool:
+ return self.__concrete
+
+ @property
+ def signature(self) -> str:
+ return self.__signature
+
+ def __str__(self) -> str:
+ return self.signature
+
+ def __eq__(self, other) -> bool:
+ return self.__class__ == other.__class__ and self.signature == other.signature
+
+ def __hash__(self) -> int:
+ # defining __eq__ disables the inherited __hash__, but new() uses instances inside dict keys
+ return hash(self.signature)
+
+ def __ge__(self, other: 'DType') -> bool:
+ if self.signature == other.signature:
+ return True
+
+ if self.symbol in self.dContainer:
+ if self.symbol != other.symbol or len(self.children) != len(other.children):
+ return False
+ for sChild, oChild in zip(self.children, other.children):
+ if not (sChild >= oChild):
+ return False
+ return True
+
+ if self.symbol == self.dUnion:
+ if other.symbol != self.dUnion:
+ return other in self.children
+ else:
+ for oChild in other.children:
+ if oChild not in self.children:
+ return False
+ return True
+
+ if self.symbol == self.dVariant:
+ return True
+
+ return False
+
+ @classmethod
+ def _parseType(cls, s: str, i: int) -> ('DType', int):
+ if s[i] in cls.dBasic:
+ return (cls.new(s[i]), i + 1)
+
+ if s[i] == cls.dArray:
+ aType, i = cls._parseType(s, i + 1)
+ return (cls.new(cls.dArray, (aType,)), i)
+
+ if s[i] == cls.dMap:
+ kType, i = cls._parseType(s, i + 1)
+ vType, i = cls._parseType(s, i)
+ return (cls.new(cls.dMap, (kType, vType)), i)
+
+ if s[i] == cls.dTuple:
+ tTypes, i = cls._parseTypes(s, i + 1)
+ assert len(tTypes) > 1, 'tuple type needs at least two elements'
+ assert s[i] == ')', 'unexpected character in type signature'
+ return (cls.new(cls.dTuple, tTypes), i + 1)
+
+ if s[i] == cls.dUnion:
+ uTypes, i = cls._parseTypes(s, i + 1)
+ assert len(uTypes) > 1, 'union type needs at least two elements'
+ assert s[i] == ']', 'unexpected character in type signature'
+ return (cls.new(cls.dUnion, uTypes), i + 1)
+
+ if s[i] == cls.dVariant:
+ return (cls.new(s[i]), i + 1)
+
+ raise RuntimeError('unexpected character in type signature')
+
+ @classmethod
+ def _parseTypes(cls, s: str, i: int) -> (Sequence['DType'], int):
+ dTypes = []
+ try:
+ while True:
+ dType, i = cls._parseType(s, i)
+ dTypes.append(dType)
+ except BaseException:
+ pass
+ return (dTypes, i)
+
+ def dump(self):
+ return str(self)
+
+ @classmethod
+ def load(cls, data):
+ dType, i = cls._parseType(data, 0)
+ assert i == len(data), 'unexpected character at end of type signature'
+ return dType
+
+ def isinstance(self, obj: Any):
+ if self.symbol == self.dInteger:
+ return isinstance(obj, int)
+
+ if self.symbol == self.dString:
+ return isinstance(obj, str)
+
+ if self.symbol == self.dBytes:
+ return isinstance(obj, bytes)
+
+ if self.symbol == self.dFloat:
+ return isinstance(obj, float)
+
+ if self.symbol == self.dBool:
+ return isinstance(obj, bool)
+
+ if self.symbol == self.dNone:
+ return obj is None
+
+ if self.symbol == self.dArray:
+ if not isinstance(obj, Sequence):
+ return False
+ cType = self.children[0]
+ for cObj in obj:
+ if not cType.isinstance(cObj):
+ return False
+ return True
+
+ if self.symbol == self.dMap:
+ if not isinstance(obj, Mapping):
+ return False
+ kType = self.children[0]
+ vType = self.children[1]
+ for kObj, vObj in obj.items():
+ if not kType.isinstance(kObj):
+ return False
+ if not vType.isinstance(vObj):
+ return False
+ return True
+
+ if self.symbol == self.dTuple:
+ if not isinstance(obj, Sequence):
+ return False
+ if len(self.children) != len(obj):
+ return False
+ for tType, tObj in zip(self.children, obj):
+ if not tType.isinstance(tObj):
+ return False
+ return True
+
+ if self.symbol == self.dUnion:
+ for uType in self.children:
+ if uType.isinstance(obj):
+ return True
+ return False
+
+ if self.symbol == self.dVariant:
+ if not isinstance(obj, Sequence) or len(obj) != 2:
+ return False
+ vType = obj[0]
+ if not isinstance(vType, self.__class__):
+ try:
+ vType = self.load(vType)
+ except (RuntimeError, AssertionError):
+ return False
+ return vType.isinstance(obj[1])
+
+ raise RuntimeError('unknown symbol !?!')
+
+
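+
+ # Illustrative signatures (examples only, built from the symbols defined above):
+ #
+ # DType.load('I').isinstance(3) # True: a plain integer
+ # DType.load('AI').isinstance([1, 2, 3]) # True: array of integers
+ # DType.load('MSI').isinstance({'a': 1}) # True: map from string to integer
+ # DType.load('(IS)').isinstance((1, 'x')) # True: two-element tuple of integer and string
+ # DType.load('[IS]').isinstance('x') # True: union of integer and string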
class Named(supcon.util.Named):
"""A base class for objects with a name.
"""
class DEvent(supcon.util.Dumpable, NamedAndDescribed):
- """Describes an event that can be emitted by an implementation on the bus"""
+ """Describes an event that can be fired by an implementation on the bus"""
regex = re.compile('[a-zA-Z0-9]+')
description (str): a description of the method
"""
NamedAndDescribed.__init__(self, name, description)
- self.__inArgs = DArguments.to(inArgs)#
+ self.__inArgs = DArguments.to(inArgs)
self.__outArgs = DArguments.to(outArgs)
@property
@property
def events(self) -> DEvents:
- """DEvent: the list of events this interface can emit"""
+ """DEvent: the list of events this interface can fire"""
return self.__events
@property
"""The base class for interface implementations"""
def __init__(self, interface: DInterface):
- """
- Args:
- interface (DInterface): the interface that is implemented by this Implementation
- """
if not isinstance(interface, DInterface):
raise ValueError('interface must be an instance of {}'.format(DInterface))
self.__interface = interface
@property
def intf(self) -> str:
- """str: The name of the implemented interface"""
+ """The name of the implemented interface"""
return self.__interface.name
@property
def interface(self) -> DInterface:
- """Interface: The implemented interface"""
+ """The implemented interface"""
return self.__interface
- def setCallCb(self, method: str, cb):
- """Sets a callback for the given method. The method must be a method of the
- interface this Implementation implements. The callback must implement the
- given method. The callback gets called by calls to Implementation.call()
-
- Args:
- method (str): the interface method that the callback implements
- cb (callable): the callback
+ def setCallCb(self, method: str, cb: Callable):
+ """
+ Sets a callback for the given method. The method must be a method of the interface this
+ Implementation implements. The callback must implement the given method. The callback gets
+ called by calls to Implementation.call().
"""
method = DMethod.toName(method)
if method in self.__callCbs:
raise ValueError('Interface has no method {}!'.format(method))
self.__callCbs[method] = cb
- def call(self, method: str, inArgs) -> defer.Deferred:
- """Calls the given interface method with the given arguments. This method
- calls the callback set by Implementation.setCallCb()
-
- Args:
- method (str): the called interface method
- inArgs (Mapping): a map of input arguments
- Returns:
- defer.Deferred: Resolves with the result of the called method
+ async def call(self, method: str, inArgs: Mapping) -> Mapping:
"""
- def validateReturn(outArgs):
+ Calls the given interface method with the given arguments. This method calls the callback set by
+ Implementation.setCallCb()
+ """
+ def makeCall(method: str, inArgs) -> asyncio.Future:
+ future = asyncio.Future()
try:
- self.__interface.validateReturn(method, outArgs)
- except:
- traceback.print_exc()
- raise
- return outArgs
+ result = self.__callCbs[method](**inArgs)
+ if asyncio.iscoroutine(result):
+ return result
+ future.set_result(result)
+ except BaseException as e:
+ future.set_exception(e)
+ return future
- try:
- self.__interface.validateCall(method, inArgs)
- if method not in self.__callCbs:
- raise ValueError('Callback for method {} is not set!'.format(method))
+ self.__interface.validateCall(method, inArgs)
+ if method not in self.__callCbs:
+ raise ValueError('Callback for method {} is not set!'.format(method))
- d = self.__callCbs[method](**inArgs)
- if not isinstance(d, defer.Deferred):
- d = defer.succeed(d)
- d.addCallback(validateReturn)
- except BaseException as e:
- traceback.print_exc()
- d = defer.fail(e)
+ result = await makeCall(method, inArgs)
+ result = result if result else {}
- return d
+ self.__interface.validateReturn(method, result)
+ return result
def addFireCb(self, cb):
- """Adds a callback that gets called, when this Implementation fires an
- event.
-
- Args:
- cb (callable): the callback
- """
+ """Adds a callback that gets called, when this Implementation fires an event."""
self.__fireCbs.append(cb)
def delFireCb(self, cb):
- """Removes a callback that gets called, when this Implementation fires an
- event.
-
- Args:
- cb (callable): the callback
- """
+ """Removes a callback that gets called, when this Implementation fires an event."""
self.__fireCbs.remove(cb)
- def fire(self, event: str, args):
- """Fires the given event with the given arguments.
-
- Args:
- event (str): the event name
- args (collecion.Mapping): the event arguments
- """
+ def fire(self, event: str, args: Mapping = None):
+ """Fires the given event with the given arguments."""
+ args = args if args else {}
self.__interface.validateEvent(event, args)
for cb in self.__fireCbs:
class Object(object):
def __init__(self, interfaces=None):
- """
- Args:
- interfaces (DInterfaces): the interfaces that are implemented by this Object
- """
if interfaces is None:
interfaces = []
class Node(abc.ABC):
- """The Node Interface. This class defines the methods that participants can
- use to access the supcon bus."""
+ """
+ The Node Interface. This class defines the methods that participants can use to access the bus.
+ """
def __init__(self, name):
super().__init__()
@property
def name(self) -> str:
- """str: The name of the node on the bus"""
+ """The name of the node on the bus"""
return self.__name
@classmethod
@abc.abstractmethod
def nodes(self) -> DNodes:
- """list[str]: The currently connected nodes"""
+ """The currently connected nodes"""
raise NotImplementedError()
@abc.abstractmethod
- def connect(self, endpoint):
- """Connects the node to the given endpoint.
+ def connect(self, endpoint: supcon.net.ClientEndpoint) -> supcon.net.Closer:
+ """
+ Connects the node to the given endpoint.
If the connection fails or closes, the connection gets reestablished with an
exponential timeout up to two minutes.
-
- Args:
- endpoint (twisted.internet.interfaces.IStreamClientEndpoint):
"""
raise NotImplementedError()
@abc.abstractmethod
- def listen(self, endpoint):
- """Listens at the given endpoint for incoming connections
-
- Args:
- endpoint (twisted.internet.interfaces.IStreamServerEndpoint):
- """
+ def listen(self, endpoint: supcon.net.ServerEndpoint) -> supcon.net.Closer:
+ """Listens at the given endpoint for incoming connections"""
raise NotImplementedError()
@abc.abstractmethod
def register(self, path: str, impl: Implementation):
- """Registers an implementation with the node
-
- Args:
- impl (Implementation):
- """
+ """Registers an implementation with the node"""
raise NotImplementedError()
@abc.abstractmethod
def unregister(self, path: str, impl: Implementation):
- """Removes an implementation from the node
-
- Args:
- impl (supcon.intf.Implementation):
- """
+ """Removes an implementation from the node"""
raise NotImplementedError()
@abc.abstractmethod
- def call(self, node: str, path: str, intf: str, method: str, args: dict) -> defer.Deferred:
- """Calls a method on the bus
-
- Args:
- node (str): a node on the bus
- path (str): a path on the given node
- intf (str): an interface at the given path
- method (str): a method of the given interface
- args (dict): a dict of method arguments
-
- Returns:
- defer.Deferred:
- """
+ async def call(self, node: str, path: str, intf: str, method: str, inArgs: Mapping[str, Any] = None) -> Mapping[str, Any]:
+ """Calls a method on the bus"""
raise NotImplementedError()
@abc.abstractmethod
- def on(self, node: str, path: str, intf: str, event: str, cb):
- """Registers a callback for an event on the bus
-
- Args:
- node (str): a node on the bus
- path (str): a path on the given node
- intf (str): an interface at the given path
- event (str): a method of the given interface
- cb (callable): a callable that gets called with a dict of event arguments
- """
+ def on(self, node: str, path: str, intf: str, event: str, cb: Callable) -> Hashable:
+ """Registers a callback for an event on the bus"""
raise NotImplementedError()
@abc.abstractmethod
- def off(self, node: str, path: str, intf: str, event: str, cb):
- """Unregisters a callback for an event on the bus
-
- Args:
- node (str): a node on the bus
- path (str): a path on the given node
- intf (str): an interface at the given path
- event (str): a method of the given interface
- cb (callable): a callable that gets called with a dict of event arguments
- """
- raise NotImplementedError()
+ def off(self, handle: Hashable) -> bool:
+ """Unregisters a callback for an event on the bus"""
+ raise NotImplementedError()
\ No newline at end of file
--- /dev/null
+
+from typing import Callable
+
+import abc
+import asyncio
+
+import supcon.util
+
+
+__all__ = (
+ 'ProtocolFactory', 'Closer', 'ClientEndpoint', 'ServerEndpoint', 'TCPClientEndpoint',
+ 'TCPServerEndpoint', 'UnixClientEndpoint', 'UnixServerEndpoint', 'ReconnectingClientEndpoint'
+)
+
+
+ProtocolFactory = Callable[[], asyncio.Protocol]
+
+
+class Closer(abc.ABC):
+
+ @abc.abstractproperty
+ def closed(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def onClose(self, cb: Callable[[], None]):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def close(self):
+ raise NotImplementedError()
+
+
+class ClientEndpoint(abc.ABC):
+
+ @abc.abstractmethod
+ def connect(self, factory: ProtocolFactory) -> Closer:
+ raise NotImplementedError()
+
+
+class ServerEndpoint(abc.ABC):
+
+ @abc.abstractmethod
+ def listen(self, factory: ProtocolFactory) -> Closer:
+ raise NotImplementedError()
+
+
+class _CoroCloser(Closer):
+
+ def __init__(self, coro):
+ self._future = asyncio.ensure_future(coro)
+ self._future.add_done_callback(self._futureDone)
+
+ self._close = None
+ self._closed = False
+ self._closing = False
+
+ self._cbs = []
+
+ @abc.abstractmethod
+ def _futureDone(self, future: asyncio.Future):
+ raise NotImplementedError()
+
+ def _teardown(self, *_args, **_kwargs):
+ self._closed = True
+ self._closing = False
+
+ for cb in self._cbs:
+ cb()
+ self._cbs.clear()
+
+ self._close = None
+ self._future = None
+
+ @property
+ def closed(self):
+ return self._closed
+
+ def onClose(self, cb: Callable[[], None]):
+ if self._closed:
+ cb()
+ else:
+ self._cbs.append(cb)
+
+ def close(self):
+ if self._closing or self._closed:
+ return
+
+ self._closing = True
+ if self._close:
+ self._close()
+ else:
+ self._future.cancel()
+
+
+class _ClientCloser(_CoroCloser):
+
+ def _futureDone(self, future: asyncio.Future):
+ future.remove_done_callback(self._futureDone)
+ try:
+ (transport, protocol) = future.result()
+ except BaseException:
+ self._teardown()
+ return
+ self._future = None
+
+ self._close = transport.close
+
+ handle = protocol.on('connection_lost', self._teardown)
+ self._unregister = lambda: protocol.off(handle)
+
+ if self._closing:
+ self._close()
+
+ def _teardown(self, *_args, **_kwargs):
+ try:
+ self._unregister()
+ del self._unregister
+ except AttributeError:
+ pass
+
+ _CoroCloser._teardown(self)
+
+
+class _ServerCloser(_CoroCloser):
+
+ def _futureDone(self, future: asyncio.Future):
+ future.remove_done_callback(self._futureDone)
+ try:
+ server = future.result()
+ except BaseException:
+ self._teardown()
+ return
+ self._future = None
+
+ self._close = server.close
+ asyncio.ensure_future(server.wait_closed()).add_done_callback(self._teardown)
+
+ if self._closing:
+ self._close()
+
+
+class _ProtocolWrapper(asyncio.Protocol, supcon.util.EventEmitterMixin):
+
+ def __init__(self, wrapped: asyncio.Protocol):
+ self.__wrapped = wrapped
+
+ def connection_made(self, transport):
+ self.__wrapped.connection_made(transport)
+ self._emit('connection_made', transport)
+
+ def connection_lost(self, exc):
+ self.__wrapped.connection_lost(exc)
+ self._emit('connection_lost', exc)
+
+ def pause_writing(self):
+ self.__wrapped.pause_writing()
+ self._emit('pause_writing')
+
+ def resume_writing(self):
+ self.__wrapped.resume_writing()
+ self._emit('resume_writing')
+
+ def data_received(self, data):
+ self.__wrapped.data_received(data)
+ self._emit('data_received', data)
+
+ def eof_received(self):
+ result = self.__wrapped.eof_received()
+ self._emit('eof_received')
+ return result
+
+
+class TCPClientEndpoint(ClientEndpoint):
+
+ def __init__(self, host, port, ssl=None):
+ self.__host = host
+ self.__port = port
+ self.__ssl = ssl
+
+ @property
+ def host(self):
+ return self.__host
+
+ @property
+ def port(self):
+ return self.__port
+
+ def connect(self, factory: ProtocolFactory) -> Closer:
+
+ def _factory():
+ return _ProtocolWrapper(factory())
+
+ loop = asyncio.get_event_loop()
+ coro = loop.create_connection(_factory, self.__host, self.__port, ssl=self.__ssl)
+
+ return _ClientCloser(coro)
+
+
+class TCPServerEndpoint(ServerEndpoint):
+
+ def __init__(self, host, port, ssl=None):
+ self.__host = host
+ self.__port = port
+ self.__ssl = ssl
+
+ @property
+ def host(self):
+ return self.__host
+
+ @property
+ def port(self):
+ return self.__port
+
+ def listen(self, factory: ProtocolFactory) -> Closer:
+ loop = asyncio.get_event_loop()
+ coro = loop.create_server(factory, self.__host, self.__port, ssl=self.__ssl)
+
+ return _ServerCloser(coro)
+
+
+class UnixClientEndpoint(ClientEndpoint):
+
+ def __init__(self, path):
+ self.__path = path
+
+ @property
+ def path(self):
+ return self.__path
+
+ def connect(self, factory: ProtocolFactory) -> Closer:
+
+ def _factory():
+ return _ProtocolWrapper(factory())
+
+ loop = asyncio.get_event_loop()
+ coro = loop.create_unix_connection(_factory, self.__path)
+
+ return _ClientCloser(coro)
+
+
+class UnixServerEndpoint(ServerEndpoint):
+
+ def __init__(self, path):
+ self.__path = path
+
+ @property
+ def path(self):
+ return self.__path
+
+ def listen(self, factory: ProtocolFactory) -> Closer:
+ loop = asyncio.get_event_loop()
+ coro = loop.create_unix_server(factory, self.__path)
+
+ return _ServerCloser(coro)
+
+
+class _ReconnectingCloser(Closer):
+
+ def __init__(self, endpoint: ClientEndpoint, factory: ProtocolFactory):
+ self.__endpoint = endpoint
+ self.__factory = factory
+
+ self.__closer = None
+ self.__closed = False
+ self.__closing = False
+
+ self.__cbs = []
+
+ self.__reconnect()
+
+ def __reconnect(self):
+ if self.__closing:
+ self.__teardown()
+ return
+ if self.__closed:
+ return
+
+ self.__closer = self.__endpoint.connect(self.__factory)
+ self.__closer.onClose(self.__reconnectWithTimeout)
+
+ def __reconnectWithTimeout(self):
+ loop = asyncio.get_event_loop()
+ loop.call_later(0.2, self.__reconnect)
+
+ def __teardown(self):
+ self.__closed = True
+ self.__closing = False
+
+ for cb in self.__cbs:
+ cb()
+ self.__cbs.clear()
+
+ self.__closer = None
+ self.__factory = None
+ self.__endpoint = None
+
+ @property
+ def closed(self):
+ return self.__closed
+
+ def onClose(self, cb: Callable[[], None]):
+ if self.__closed:
+ cb()
+ else:
+ self.__cbs.append(cb)
+
+ def close(self):
+ if self.__closing or self.__closed:
+ return
+
+ self.__closing = True
+ self.__closer.close()
+
+
+class ReconnectingClientEndpoint(ClientEndpoint):
+
+ def __init__(self, endpoint):
+ self.__endpoint = endpoint
+
+ def connect(self, factory: ProtocolFactory) -> Closer:
+ return _ReconnectingCloser(self.__endpoint, factory)
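+
+ # Illustrative usage sketch (hypothetical address and protocol class):
+ #
+ # endpoint = ReconnectingClientEndpoint(TCPClientEndpoint('127.0.0.1', 8700))
+ # closer = endpoint.connect(MyProtocol) # any argument-less factory returning an asyncio.Protocol
+ # closer.onClose(lambda: print('connection closed for good'))
+ # closer.close() # stops reconnecting and closes the current connection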
+from typing import Iterable, Mapping, Any
+
+import abc
+import asyncio
import msgpack
-import twisted.internet.defer as defer
-import twisted.internet.threads as threads
+class Store(abc.ABC):
+
+ async def keys(self) -> Iterable[str]:
+ raise NotImplementedError()
+
+ async def load(self, key: str) -> Any:
+ return (await self.loadAll([key]))[key]
+
+ async def loadAll(self, keys: Iterable[str]) -> Mapping[str, Any]:
+ raise NotImplementedError()
-class Store(object):
+ async def save(self, key: str, val: Any) -> 'Store':
+ return await self.saveAll({key: val})
- def load(self, key: str) -> defer.Deferred:
- pass
+ async def saveAll(self, vals: Mapping[str, Any]) -> 'Store':
+ raise NotImplementedError()
- def store(self, key: str, data: bytes) -> defer.Deferred:
- pass
+ async def clear(self, key: str) -> 'Store':
+ return await self.clearAll([key])
- def delete(self, key: str) -> defer.Deferred:
- pass
+ async def clearAll(self, keys: Iterable[str]) -> 'Store':
+ raise NotImplementedError()
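+
+ # Illustrative usage sketch for the Store API (hypothetical file and keys; FileStore is defined
+ # further below):
+ #
+ # store = FileStore('/tmp/example.msg')
+ # await store.save('answer', 42) # save()/load()/clear() delegate to the *All() variants
+ # value = await store.load('answer') # -> 42, or None for unknown keys
+ # await store.clear('answer')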
-def readFile(name: str) -> defer.Deferred:
- print('readFile')
- def inThread():
- print('readFile > inThread')
+
+async def readFile(name: str) -> bytes:
+ def inExecutor():
with open(name, 'r+b') as fd:
return fd.read()
- return threads.deferToThread(inThread)
+ loop = asyncio.get_event_loop()
+ return await loop.run_in_executor(None, inExecutor)
+
-def writeFile(name: str, data: bytes) -> defer.Deferred:
- print('writeFile')
- def inThread():
- print('writeFile > inThread')
+async def writeFile(name: str, data: bytes):
+ def inExecutor():
with open(name, 'w+b') as fd:
return fd.write(data)
- return threads.deferToThread(inThread)
+ loop = asyncio.get_event_loop()
+ return await loop.run_in_executor(None, inExecutor)
+
class FileStore(Store):
def __init__(self, name):
self.__name = name
- self.__data = {}
- self.__last = self.__readData()
-
- @defer.inlineCallbacks
- def __readData(self):
- data = yield readFile(self.__name)
- data = msgpack.unpackb(data, encoding='utf-8')
- if isinstance(data, dict):
- self.__data = data
+
+ self.__data = None
+ self.__lock = asyncio.Lock()
@property
def name(self):
return self.__name
- def __queue(self, job) -> defer.Deferred:
- d = defer.Deferred()
+ async def __read(self):
+ await self.__lock.acquire()
+
+ try:
+ data = await readFile(self.__name)
+ data = msgpack.unpackb(data, encoding='utf-8')
+ self.__data = data
+ except FileNotFoundError:
+ self.__data = {}
+ finally:
+ self.__lock.release()
+
+ async def __write(self):
+ await self.__lock.acquire()
- def trampolineSucceed(result):
- if isinstance(result, defer.Deferred):
- result.addCallbacks(trampolineSucceed, trampolineFail)
- else:
- d.callback(result)
+ try:
+ data = msgpack.packb(self.__data, use_bin_type=True)
+ await writeFile(self.__name, data)
+ finally:
+ self.__lock.release()
- def trampolineFail(result):
- if isinstance(result, defer.Deferred):
- result.addCallbacks(trampolineSucceed, trampolineFail)
- else:
- d.errback(result)
+ async def keys(self):
+ if self.__data is None:
+ await self.__read()
- self.__last.addBoth(lambda _: job())
- self.__last.addCallbacks(trampolineSucceed, trampolineFail)
+ return self.__data.keys()
- return d
+ async def loadAll(self, keys: Iterable[str]) -> Mapping[str, Any]:
+ if self.__data is None:
+ await self.__read()
- def load(self, key: str) -> defer.Deferred:
- def job():
- return self.__data[key] if key in self.__data else None
- return self.__queue(job)
+ vals = {}
+ for key in keys:
+ vals[key] = self.__data[key] if key in self.__data else None
+ return vals
- def store(self, key: str, data) -> defer.Deferred:
- def job():
- self.__data[key] = data
- return writeFile(self.__name, msgpack.packb(self.__data, use_bin_type=True))
- return self.__queue(job)
+ async def saveAll(self, vals: Mapping[str, Any]) -> Store:
+ if self.__data is None:
+ await self.__read()
- def delete(self, key: str) -> defer.Deferred:
- def job():
+ for key, val in vals.items():
+ self.__data[key] = val
+
+ await self.__write()
+ return self
+
+ async def clearAll(self, keys: Iterable[str]) -> Store:
+ if self.__data is None:
+ await self.__read()
+
+ for key in keys:
if key in self.__data:
del self.__data[key]
- return writeFile(self.__name, msgpack.packb(self.__data, use_bin_type=True))
- return self.__queue(job)
+
+ await self.__write()
+ return self
\ No newline at end of file
import supcon.util
import supcon.intf
+import asyncio
-""" the interface com.screwerk.Sensor """
+# the interface com.screwerk.Sensor
SensorIntf = supcon.intf.DInterface.load({
'name':
'com.screwerk.Sensor',
})
-def returnTrue(*args): return True
-def returnNone(*args): return None
+def returnTrue(*_args): return True
+def returnNone(*_args): return None
def returnFirst(*args): return args[0]
def sensor(self):
return self.__sensor
- def __callback(self, gpio, level, tick):
+ def __callback(self, _gpio, level, _tick):
if level >= 2:
return
super().__init__(value, guard, writeEffect, convert)
-class NodeSensor(supcon.util.EventEmitter):
+class NodeSensor(supcon.util.EventEmitterMixin):
def __init__(self, local: supcon.intf.Node, node: str, path: str):
- supcon.util.EventEmitter.__init__(self)
-
self.__local = local
self.__node = node
self.__path = path
self.__value = None
+ self.__cancel = []
+ self.__isSetup = False
+ self.__isConnected = False
- self.__local.on(self.__local.name, '/', 'supcon.Local', 'intf', self.__onIntf)
- self.__local.on(self.__local.name, '/', 'supcon.Local', 'intfLost', self.__onIntfLost)
- self.__local.on(self.node, self.path, 'com.screwerk.Sensor', 'changed', self.__onChanged)
-
- if self.__local.nodes().hasIntf(self.node, self.path, 'com.screwerk.com'):
- self.__fetchValue()
+ self.setup()
def __del__(self):
- self.__local.off(self.__local.name, '/', 'supcon.Local', 'intf', self.__onIntf)
- self.__local.off(self.__local.name, '/', 'supcon.Local', 'intfLost', self.__onIntfLost)
- self.__local.off(self.node, self.path, 'com.screwerk.Sensor', 'changed', self.__onChanged)
+ self.teardown()
+
+ def setup(self):
+ if self.__isSetup:
+ return
+
+ cancel = self.__local.on(self.__local.name, '/', 'supcon.Local', 'intf', self.__onIntf)
+ self.__cancel.append(cancel)
+ cancel = self.__local.on(self.__local.name, '/', 'supcon.Local', 'intfLost', self.__onIntfLost)
+ self.__cancel.append(cancel)
+ cancel = self.__local.on(self.node, self.path, SensorIntf.name, 'changed', self.__onChanged)
+ self.__cancel.append(cancel)
+
+ self.__isSetup = True
+
+ if self.__local.nodes().hasIntf(self.node, self.path, SensorIntf.name):
+ self.__isConnected = True
+ self.__fetchValue()
+
+ def teardown(self):
+ if not self.__isSetup:
+ return
+
+ for handle in self.__cancel:
+ self.__local.off(handle)
+ self.__cancel.clear()
+
+ self.__isConnected = False
+ self.__update(None)
+
+ self.__isSetup = False
@property
- def node(self):
+ def local(self):
+ return self.__local
+
+ @property
+ def node(self) -> str:
return self.__node
@property
- def path(self):
+ def path(self) -> str:
return self.__path
@property
def value(self):
return self.__value
+ @property
+ def isSetup(self):
+ return self.__isSetup
+
+ @property
+ def isConnected(self):
+ return self.__isConnected
+
def __fetchValue(self):
- self.__local.call(self.node, self.path, 'com.screwerk.Sensor', 'value', {}).addCallbacks(
- lambda args: self.__update(args['value']),
- lambda reason: self.__update(None)
- )
+ def doneCallback(f: asyncio.Future):
+ if f.exception():
+ self.__update(None)
+ else:
+ self.__update(f.result()['value'])
+
+ coro = self.__local.call(self.node, self.path, SensorIntf.name, 'value', {})
+ asyncio.ensure_future(coro).add_done_callback(doneCallback)
return
self.__value = value
- self.emit('changed', self.__value)
+ self._emit('changed', self.__value)
- def __onIntf(self, args, event):
- if (args.node == self.node and args.path == self.path and args.intf.name == 'com.screwerk.Sensor'):
+ def __onIntf(self, args, _event):
+ if (args.node == self.node and args.path == self.path and args.intf.name == SensorIntf.name):
+ self.__isConnected = True
self.__fetchValue()
- def __onIntfLost(self, args, event):
- if (args.node == self.node and args.path == self.path and args.intf.name == 'com.screwerk.Sensor'):
+ def __onIntfLost(self, args, _event):
+ if (args.node == self.node and args.path == self.path and args.intf.name == SensorIntf.name):
+ self.__isConnected = False
self.__update(None)
- def __onChanged(self, args, event):
- self.__update(args['value'])
+ def __onChanged(self, args, _event):
+ self.__update(args['value'])
# -*- coding: utf-8 -*-
import abc
-
-import asyncio
+import uuid
import collections
-import collections.abc
-import twisted.internet.defer
+from typing import Hashable, Callable
-def schedule(obj):
- if asyncio.iscoroutine(obj):
- obj = twisted.internet.defer.ensureDeferred(obj)
- return obj
class Dumpable(abc.ABC):
"""
"""
return value if isinstance(value, cls) else cls.load(value)
+class EventEmitterInterface(abc.ABC):
-class EventEmitter(object):
+ @abc.abstractmethod
+ def on(self, event: Hashable, callback: Callable) -> Hashable:
+ '''
+ Registers the given callback for the given event. Returns a handle that can be passed to
+ off() to unregister the callback.
+ '''
- def __init__(self):
- self.__callbacks = {}
+ @abc.abstractmethod
+ def off(self, handle: Hashable) -> bool:
+ '''
+ Unregisters a previously registered callback by the given handle. Returns True on success.
+ '''
- def on(self, event, callback):
- """
- Registers the given callback for the given event
- """
- if type not in self.__callbacks:
- self.__callbacks[event] = {}
- self.__callbacks[event][id(callback)] = callback
- return lambda: self.off(event, callback)
+ @abc.abstractmethod
+ def _emit(self, event: Hashable, *args, **kwargs) -> None:
+ '''
+ Emits the given event by calling all callbacks registered for this event.
+ '''
- def off(self, event, callback):
- """
- Unregisters the given callback for the given event
- """
- if event not in self.__callbacks:
- return
- if id(callback) not in self.__callbacks[event]:
+
+class EventEmitterMixin(EventEmitterInterface):
+
+ def on(self, event: Hashable, callback: Callable) -> Hashable:
+ try:
+ events = self._EventEmitterMixin_events
+ callbacks = self._EventEmitterMixin_callbacks
+ except AttributeError:
+ events = self._EventEmitterMixin_events = {}
+ callbacks = self._EventEmitterMixin_callbacks = {}
+
+ if event not in callbacks:
+ callbacks[event] = {}
+
+ handle = uuid.uuid4()
+ while handle in events:
+ handle = uuid.uuid4()
+
+ events[handle] = event
+ callbacks[event][handle] = callback
+
+ return handle
+
+ def off(self, handle: Hashable):
+ try:
+ events = self._EventEmitterMixin_events
+ callbacks = self._EventEmitterMixin_callbacks
+ except AttributeError:
+ return False
+
+ if handle not in events:
+ return False
+ event = events[handle]
+
+ del events[handle]
+ del callbacks[event][handle]
+
+ if not callbacks[event]:
+ del callbacks[event]
+
+ return True
+
+ def _emit(self, event: Hashable, *args, **kwargs) -> None:
+ try:
+ callbacks = self._EventEmitterMixin_callbacks
+ except AttributeError:
return
- del self.__callbacks[event][id(callback)]
- def emit(self, event, *args):
- """
- Calls all callbacks for the given event with the given arguments
- """
- if event not in self.__callbacks:
+ if event not in callbacks:
return
- for callback in self.__callbacks[event].values():
- schedule(callback(*args))
+
+ for callback in callbacks[event].copy().values():
+ callback(*args, **kwargs)
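+
+ # Illustrative usage sketch (hypothetical emitter class):
+ #
+ # class Thermometer(EventEmitterMixin):
+ #     def measure(self, value):
+ #         self._emit('changed', value)
+ #
+ # t = Thermometer()
+ # handle = t.on('changed', lambda value: print(value))
+ # t.measure(21.5) # prints 21.5
+ # t.off(handle) # returns True; later measure() calls no longer print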
--- /dev/null
+#!/usr/bin/env python3
+
+import os
+import ssl
+import asyncio
+
+from supcon.graph import TCPEndpoint, Connection
+
+sc = ssl.SSLContext(protocol=ssl.PROTOCOL_TLSv1_2)
+sc.load_cert_chain(os.path.dirname(__file__) + '/local.srv.pem')
+
+ep = TCPEndpoint('10.4.1.2', 4339, ssl=sc)
+gc = Connection(ep)
+
+async def testConnection(gc: Connection):
+ knoten_guid = await gc.attributsknoten('knoten_name','knoten')
+ print(knoten_guid)
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(asyncio.gather(
+ testConnection(gc),
+ testConnection(gc),
+ testConnection(gc),
+))
+loop.close()
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env python3
+
+import os
+import asyncio
+
+from supcon.store import FileStore
+
+fs = FileStore(os.path.dirname(__file__) + '/store.msg')
+
+async def testFileStore(fs):
+ print(await fs.load('a'))
+ await fs.save('a', 'abc')
+ print(await fs.load('a'))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(testFileStore(fs))
+loop.close()