diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index dcf374d3..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "travis"] - path = travis - url = https://github.com/nemgrouplimited/travis-functions.git diff --git a/build.gradle b/build.gradle index b0b2da94..7a5a64cd 100644 --- a/build.gradle +++ b/build.gradle @@ -34,6 +34,7 @@ plugins { id 'jacoco' id 'signing' id 'java-library' + id 'java' } apply plugin: 'nebula-aggregate-javadocs' @@ -43,8 +44,8 @@ apply plugin: 'nebula-aggregate-javadocs' ext { vertxVersion = "3.5.0" rxjavaVersion = "2.1.7" - junitVersion = "5.4.0" - catbufferVersion = "0.1.2" + junitVersion = "5.9.0" + catbufferVersion = "0.1.3-SNAPSHOT" restApiVersion = "1.0.0" jackson_version = "2.9.9" jackson_databind_version = "2.9.9" diff --git a/build_serializers.sh b/build_serializers.sh new file mode 100644 index 00000000..da06cb17 --- /dev/null +++ b/build_serializers.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# deliberate order as generators seem to be using some ancient pyyaml + +cd catbuffer-generators +pip install -r requirements.txt +cd .. + +cd catbuffer-parser +pip install -r requirements.txt +cd .. + +cd catbuffer-generators + +PYTHONPATH=. 
python3 ../catbuffer-parser/main.py \ + --schema ../catbuffer-schemas/schemas/all.cats \ + --include ../catbuffer-schemas/schemas/ \ + --generator java \ + --copyright HEADER.inc diff --git a/catbuffer-generators/.gitignore b/catbuffer-generators/.gitignore new file mode 100644 index 00000000..97a42225 --- /dev/null +++ b/catbuffer-generators/.gitignore @@ -0,0 +1,15 @@ +*~ +*.pch +*.pyc +__pycache__/ +.idea +.vscode/ +.DS_Store +_generated/ +.python-version +node_modules/ +.gradle +build +.idea +/catbuffer-generators.iml +/build-old/ diff --git a/catbuffer-generators/.gitmodules b/catbuffer-generators/.gitmodules new file mode 100644 index 00000000..909e2ac5 --- /dev/null +++ b/catbuffer-generators/.gitmodules @@ -0,0 +1,3 @@ +[submodule "catbuffer"] + path = catbuffer + url = https://github.com/nemtech/catbuffer.git diff --git a/catbuffer-generators/.pycodestyle b/catbuffer-generators/.pycodestyle new file mode 100644 index 00000000..caa456f8 --- /dev/null +++ b/catbuffer-generators/.pycodestyle @@ -0,0 +1,2 @@ +[pycodestyle] +max-line-length = 140 diff --git a/catbuffer-generators/.pylintrc b/catbuffer-generators/.pylintrc new file mode 100644 index 00000000..f4817710 --- /dev/null +++ b/catbuffer-generators/.pylintrc @@ -0,0 +1,438 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. 
+jobs=1 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". 
If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +#disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call +#disable=missing-docstring,misplaced-comparison-constant + +# Disable the message(s) with the given id(s). +# C0111 = Missing docstring +# C0122 = Misplaced comparison constant +# C0103 = Invalid name +# R0201 = Method could be a function +# R0902 = Too many instance attributes +# R0903 = Too few public methods +# R0913 = Too many arguments +# R0914 = Too many local variables +# W0105 = String statement has no effect +disable=C0111,C0122,C0103,R0201,R0902,R0903,R0913,R0914,W0105 + +# Enable the message, report, category or checker with the given id(s). 
You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. 
+score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[BASIC] + +# Naming hint for argument names +argument-name-hint=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct argument names +argument-rgx=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-z0-9_]*))$ + +# Naming hint for attribute names +attr-name-hint=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-z0-9_]*))$ + +# Regular expression matching correct attribute names +attr-rgx=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-z0-9_]*))$ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(t_[A-Z0-9_]+)|(__.*__))$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(t_[A-Z0-9_]+)|(__.*__))$ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. 
+docstring-min-length=100 + +# Naming hint for function names +function-name-hint=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct function names +function-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for method names +method-name-hint=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-zA-Z0-9_]*))$ + +# Regular expression matching correct method names +method-rgx=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-zA-Z0-9_]*))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct module names +module-rgx=([A-Za-z][a-zA-Z0-9]+)$ + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=. + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming hint for variable names +variable-name-hint=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-z0-9_]*))$ + +# Regular expression matching correct variable names +variable-rgx=(([a-z][a-zA-Z0-9_]{2,50})|(_[a-z0-9_]*))$ + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. 
+indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=140 + +# Maximum number of lines in a module +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=100 + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. 
+spelling-store-unknown-words=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. 
+missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Maximum number of attributes for a class (see R0902). 
+max-attributes=8 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/catbuffer-generators/.travis.yml b/catbuffer-generators/.travis.yml new file mode 100644 index 00000000..dc920493 --- /dev/null +++ b/catbuffer-generators/.travis.yml @@ -0,0 +1,81 @@ +language: python +python: + - '3.7' +addons: + apt: + packages: + - openjdk-8-jdk +before_cache: + - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock + - rm -fr $HOME/.gradle/caches/*/plugin-resolution/ +cache: + directories: + - "$HOME/.gradle/caches/" + - "$HOME/.gradle/wrapper/" + - "$HOME/.cache/pip" + - "$HOME/.npm" +install: + - pip install -r requirements.txt +env: + global: + - DEV_BRANCH=dev + - RELEASE_BRANCH=main + - POST_RELEASE_BRANCH=main + - RELEASE_MESSAGE=release +before_script: + - export PYTHONPATH=$PYTHONPATH:./catbuffer + - nvm install --lts + - node --version +script: + - . ./travis/travis-functions.sh + - validate_env_variables +jobs: + include: + - stage: test + name: pylint + script: pylint --load-plugins pylint_quotes generators + - name: pycodestyle + script: pycodestyle --config=.pycodestyle . 
+ + - name: java + script: ./scripts/generate_java.sh + - name: typescript + script: ./scripts/generate_typescript.sh + - name: python + script: ./scripts/generate_python.sh + + - stage: alpha + name: java publish alpha + script: ./scripts/generate_java.sh publish + if: branch = env(DEV_BRANCH) AND type = push + - name: typescript publish alpha + script: ./scripts/generate_typescript.sh publish + if: branch = env(DEV_BRANCH) AND type = push + - name: python publish alpha + script: ./scripts/generate_python.sh publish + if: branch = env(DEV_BRANCH) AND type = push + + - stage: release + name: java publish release + script: ./scripts/generate_java.sh release + if: branch = env(RELEASE_BRANCH) AND type = api AND commit_message = env(RELEASE_MESSAGE) + - name: typescript publish release + script: ./scripts/generate_typescript.sh release + if: branch = env(RELEASE_BRANCH) AND type = api AND commit_message = env(RELEASE_MESSAGE) + - name: python publish release + script: ./scripts/generate_python.sh release + if: branch = env(RELEASE_BRANCH) AND type = api AND commit_message = env(RELEASE_MESSAGE) + + - stage: post release + name: tag and version upgrade + script: /bin/bash travis/travis-functions.sh post_release_version_file + if: branch = env(RELEASE_BRANCH) AND type = api AND commit_message = env(RELEASE_MESSAGE) + +before_install: + - | + if [ -z "${signingKeyId}" ]; then + echo "No signing the artifacts" + else + echo "Signing artifacts" + openssl aes-256-cbc -K $encrypted_37d6c1a7ee80_key -iv $encrypted_37d6c1a7ee80_iv -in travis/symbol-sdk-java.gpg.enc -out symbol-sdk-java.gpg -d + fi diff --git a/catbuffer-generators/CONTRIBUTING.md b/catbuffer-generators/CONTRIBUTING.md new file mode 100644 index 00000000..31c4785b --- /dev/null +++ b/catbuffer-generators/CONTRIBUTING.md @@ -0,0 +1,49 @@ +# Contributing to catbuffer-generators + +As explained in the [README](README.md) file, this project consists of a set of code generators that serialize and deserialize 
Catapult entities (Transactions, Types, ...). You can use this project to obtain classes that will help you deal with Catapult entities in their binary form from your language of choice. + +If the language you are interested in is not covered by the project, you can add a new generator by following this guide. + +## The Generators + +The [Catapult Server](https://github.com/nemtech/catapult-server) manages a number of entities (e.g. transactions) which need to be stored in binary form when communicated over a network. The binary layout of these entities is described using **catbuffer** files (Catapult Buffer files with ``.cat`` extension), stored in the [catbuffer](https://github.com/nemtech/catbuffer) repository, inside the ``schemas`` folder. + +The generators in this project read catbuffer schema files and produce the necessary files in the target language to serialize and deserialize them. + +Each generator is a Python class residing in the ``generators`` folder and listed in ``generators/All.py``. Most of them use [Mako templates](https://www.makotemplates.org/) so the boilerplate code is abstracted to common classes. + +> **NOTE:** +> Some generators like ``javascript`` and ``cpp_builder`` are still manually built and do not use templates. They are in the process of being adapted to the new mechanism. **Do not use them as examples to build your own generators!** + +Generators are invoked by name from the ``catbuffer/main.py``, after pointing the ``PYTHONPATH`` environment variable to the ``catbuffer-generators`` folder. + +For instance, if you're already in the ``catbuffer-generators`` folder, you can see the list of available generators by running: + +```bash +PYTHONPATH=. python3 catbuffer/main.py +``` + +So, to use the java generator: + +```bash +PYTHONPATH=. python3 catbuffer/main.py -g java +``` + +You can take a look at the ``scripts`` folder to see more invocation details of ``main.py``. 
+ +## Adding a New Generator + +Unfortunately, the process to add a new generator is not automated yet: + +1. Copy the ``java`` folder inside ``generators`` and rename the folder and the files inside to the desired language. + +2. Edit ``JavaFileGenerator.py`` and ``JavaHelper.py`` (now renamed) to use the proper language name and file extension. + +3. Edit all files inside the ``templates`` folder to adapt to the selected language. Use the Java version for inspiration and make sure you know how [Mako templates](https://www.makotemplates.org/) work. + +4. Add your new generator to the global register in ``generators/All.py``. + +Once the generator is ready you should be able to invoke it using ``catbuffer/main.py`` as shown above. Create a helper script like the ones in the ``scripts`` folder to automate building and deploying the new serializer classes! + +> **NOTE:** +> The ``VectorTest`` file contains unit tests for the generator. Add your own tests to the new generator and run them from a script in the ``scripts`` folder. See how ``generate_typescrpt.sh`` executes ``npm run test``, for example. diff --git a/catbuffer-generators/HEADER.inc b/catbuffer-generators/HEADER.inc new file mode 100644 index 00000000..243a6f9a --- /dev/null +++ b/catbuffer-generators/HEADER.inc @@ -0,0 +1,20 @@ +/** +*** Copyright (c) 2016-2019, Jaguar0625, gimre, BloodyRookie, Tech Bureau, Corp. +*** Copyright (c) 2020-present, Jaguar0625, gimre, BloodyRookie. +*** +*** This file is part of Catapult. +*** +*** Catapult is free software: you can redistribute it and/or modify +*** it under the terms of the GNU Lesser General Public License as published by +*** the Free Software Foundation, either version 3 of the License, or +*** (at your option) any later version. +*** +*** Catapult is distributed in the hope that it will be useful, +*** but WITHOUT ANY WARRANTY; without even the implied warranty of +*** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +*** GNU Lesser General Public License for more details. +*** +*** You should have received a copy of the GNU Lesser General Public License +*** along with Catapult. If not, see . +**/ + diff --git a/catbuffer-generators/LICENSE b/catbuffer-generators/LICENSE new file mode 100644 index 00000000..ab602974 --- /dev/null +++ b/catbuffer-generators/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/catbuffer-generators/README.md b/catbuffer-generators/README.md new file mode 100644 index 00000000..8c83c6ce --- /dev/null +++ b/catbuffer-generators/README.md @@ -0,0 +1,63 @@ +# catbuffer-generators + +[![Build Status](https://api.travis-ci.com/nemtech/catbuffer-generators.svg?branch=main)](https://travis-ci.com/nemtech/catbuffer-generators) + +Set of code generators to serialize and deserialize Catapult entities in different programming languages. 
+ +In combination with the [catbuffer](https://github.com/nemtech/catbuffer) project, developers can generate builder classes for a given set of programming languages. For example, the [Symbol SDKs](https://nemtech.github.io/sdk) use the generated code to operate with the entities in binary form before announcing them to the network. + +## Supported programming languages + +- C++ +- Java +- TypeScript/JavaScript +- Python + +## Requirements + +- Python >= 3.4 + +## Installation + +1. Clone the ``catbuffer-generators`` repository: + +```bash +git clone --recurse-submodules https://github.com/nemtech/catbuffer-generators +``` + +This will also clone the ``catbuffer`` repository as a submodule. + +2. Install the package requirements: + +```bash +cd catbuffer-generators +pip install -r requirements.txt +``` + +## Usage + +Use the ``scripts/generate_all.sh`` script to create code for the different languages. For example: + +```bash +scripts/generate_all.sh cpp_builder +``` + +This processes every schema and writes the output files in the ``catbuffer/_generated/`` folder. + +Alternatively, you can use any of the language-specific scripts like ``scripts/generate_typescript.sh``. Most of these scripts, after producing the code will compile it into an output artifact in the ``build`` folder. + +> **NOTE:** +> These scripts require Bash 4 or higher. + +### Run the linter + +```bash +pylint --load-plugins pylint_quotes generators test/python +pycodestyle --config=.pycodestyle . +``` + +> **NOTE:** +> This requires Python 3.7 or higher. + +Copyright (c) 2016-2019, Jaguar0625, gimre, BloodyRookie, Tech Bureau, Corp. +Copyright (c) 2020-present, Jaguar0625, gimre, BloodyRookie. 
diff --git a/catbuffer-generators/generators/All.py b/catbuffer-generators/generators/All.py new file mode 100644 index 00000000..68c761fa --- /dev/null +++ b/catbuffer-generators/generators/All.py @@ -0,0 +1,11 @@ +from generators.cpp_builder.BuilderGenerator import BuilderGenerator +from generators.java.JavaFileGenerator import JavaFileGenerator +from generators.typescript.TypescriptFileGenerator import TypescriptFileGenerator +from generators.python.PythonFileGenerator import PythonFileGenerator + +AVAILABLE_GENERATORS = { + 'cpp_builder': BuilderGenerator, + 'java': JavaFileGenerator, + 'typescript': TypescriptFileGenerator, + 'python': PythonFileGenerator +} diff --git a/catbuffer-generators/generators/Descriptor.py b/catbuffer-generators/generators/Descriptor.py new file mode 100644 index 00000000..b6bc5ab0 --- /dev/null +++ b/catbuffer-generators/generators/Descriptor.py @@ -0,0 +1,3 @@ +from collections import namedtuple + +Descriptor = namedtuple('Descriptor', ['filename', 'code']) diff --git a/catbuffer-generators/generators/common/FileGenerator.py b/catbuffer-generators/generators/common/FileGenerator.py new file mode 100644 index 00000000..fc6272d8 --- /dev/null +++ b/catbuffer-generators/generators/common/FileGenerator.py @@ -0,0 +1,145 @@ +import os +from abc import ABC, abstractmethod + +from generators.Descriptor import Descriptor +from generators.common.MakoClassGenerator import MakoClassGenerator +from generators.common.MakoEnumGenerator import MakoEnumGenerator +from generators.common.MakoStaticClassGenerator import MakoStaticClassGenerator +from generators.common.MakoTypeGenerator import MakoTypeGenerator + + +class FileGenerator(ABC): + """ + Generic top level file generator. A language will extend this class defining how to create the different generators. 
+ """ + + def __init__(self, schema, options): + self.schema = schema + self.current = None + self.options = options + + def __iter__(self): + self.current = self.generate() + return self + + def __next__(self): + return next(self.current) + + def generate(self): + """ + Main entry point for the generator. It collects all the possible file generators and execute + them producing different files. + :return: multiple Descriptors using yield. + """ + helper = self.create_helper() + generators = [] + for type_name, class_schema in self.schema.items(): + attribute_type = class_schema['type'] + if helper.is_byte_type(attribute_type): + generators.extend(self.create_type_generators(helper, type_name, class_schema)) + elif helper.is_enum_type(attribute_type): + generators.extend(self.create_enum_generators(helper, type_name, class_schema)) + elif helper.is_struct_type(attribute_type) and helper.should_generate_class(type_name): + generators.extend(self.create_class_generators(helper, type_name, class_schema)) + # write all the helper files + for filename in self.get_static_templates_file_names(): + generators.extend(self.create_static_class_generators(filename, helper)) + for generator in generators: + code = self.init_code() + code += generator.generate() + yield Descriptor(generator.get_generated_file_name(), code) + + def init_code(self): + """ + :return: a brand new memory file with the license if provided. + """ + copyright_file = self.options['copyright'] + code = [] + if os.path.isfile(copyright_file): + with open(copyright_file) as header: + code = [line.strip() for line in header] + return code + + def create_static_class_generators(self, filename, helper): + """ + It creates the generators for a static generator. 
By default creates one generator by file (like .java or .ts) + Note that other languages may need more than one (like .cpp and .h) + :param filename: the filename + :param helper: the language helper + :return: a list of generator, one by default using mako templates + """ + return [MakoStaticClassGenerator(self.get_template_path() + filename + '.mako', + filename + self.get_main_file_extension(), helper, + self.schema, None)] + + def create_class_generators(self, helper, type_name, class_schema): + """ + Creates the generators for given class type. By default creates one generator by file (like .java or .ts) + Note that other languages may need more than one (like .cpp and .h) + + :param helper: the language helper + :param type_name: the type name. + :param class_schema: the schema of the currency class + :return: a list of generator, one by default using mako templates + """ + return [MakoClassGenerator(helper, type_name, self.schema, class_schema, self.get_template_path(), + self.get_main_file_extension())] + + def create_enum_generators(self, helper, type_name, class_schema): + """ + + Creates the generators for given enum type. By default creates one generator by file (like .java or .ts) + Note that other languages may need more than one (like .cpp and .h) + + :param helper: the language helper + :param type_name: the type name. + :param class_schema: the schema of the currency class + :return: a list of generator, one by default using mako templates + """ + return [MakoEnumGenerator(helper, type_name, self.schema, class_schema, self.get_template_path(), + self.get_main_file_extension())] + + def create_type_generators(self, helper, type_name, class_schema): + """ + + Creates the generators for given atomic type. By default creates one generator by file (like .java or .ts) + Note that other languages may need more than one (like .cpp and .h) + + :param helper: the language helper + :param type_name: the type name. 
+ :param class_schema: the schema of the currency class + :return: a list of generator, one by default using mako templates + """ + return [MakoTypeGenerator(helper, type_name, self.schema, class_schema, self.get_template_path(), + self.get_main_file_extension())] + + @abstractmethod + def get_template_path(self): + """ + + :return: the path where the language templates will be find. It needs to be redefined if Mako genertors are used. + """ + raise NotImplementedError('get_template_path must be defined in subclass') + + @abstractmethod + def get_main_file_extension(self): + """ + + :return: the extension of the generated files. Example: '.java' + """ + raise NotImplementedError('get_main_file_extension must be defined in subclass') + + @abstractmethod + def create_helper(self): + """ + + :return: the language helper. Subclasses would override this method returning a subclass of Helper. + """ + raise NotImplementedError('create_helper must be defined in subclass') + + def get_static_templates_file_names(self): + """ + + :return: a list of known static (GeneratorUtils for example) to be generated. Most languages would override this. 
+ """ + return [] diff --git a/catbuffer-generators/generators/common/Helper.py b/catbuffer-generators/generators/common/Helper.py new file mode 100644 index 00000000..7f87242e --- /dev/null +++ b/catbuffer-generators/generators/common/Helper.py @@ -0,0 +1,252 @@ +from enum import Enum + +from abc import ABC, abstractmethod + + +# pylint: disable=too-many-public-methods + + +class TypeDescriptorType(Enum): + """Type descriptor enum""" + Byte = 'byte' + Struct = 'struct' + Enum = 'enum' + + +class TypeDescriptorDisposition(Enum): + Inline = 'inline' + Const = 'const' + Fill = 'fill' + Var = 'var' + + +class AttributeKind(Enum): + """Attribute type enum""" + SIMPLE = 1 + BUFFER = 2 + ARRAY = 3 + CUSTOM = 4 + FLAGS = 5 + SIZE_FIELD = 6 + FILL_ARRAY = 7 + VAR_ARRAY = 8 + UNKNOWN = 100 + + +class Helper(ABC): + """ + Helper stateless methods used when generating templates. Most languages would extend this object. + """ + + def __init__(self): + # a shortcut for the templates to access the AttributeKind type. 
+ self.AttributeKind = AttributeKind + + @staticmethod + def is_struct_type(typename): + return typename == TypeDescriptorType.Struct.value + + @staticmethod + def is_enum_type(typename): + return typename == TypeDescriptorType.Enum.value + + @staticmethod + def is_byte_type(typename): + return typename == TypeDescriptorType.Byte.value + + @staticmethod + def resolve_alignment(a): + embedded = a.attribute is not None and 'type' in a.attribute and a.attribute[ + 'type'] == 'EmbeddedTransaction' + parent_embedded = a.parent_attribute is not None and 'type' in a.parent_attribute and a.parent_attribute[ + 'type'] == 'EmbeddedTransaction' + if embedded or parent_embedded: + return 8 + return 0 + + @staticmethod + def is_inline_type(attribute): + return 'disposition' in attribute and attribute['disposition'] == TypeDescriptorDisposition.Inline.value + + @staticmethod + def is_const_type(attribute): + return 'disposition' in attribute and attribute['disposition'] == TypeDescriptorDisposition.Const.value + + @staticmethod + def is_fill_array_type(attribute): + return 'disposition' in attribute and attribute['disposition'] == TypeDescriptorDisposition.Fill.value + + @staticmethod + def is_var_array_type(attribute): + return 'disposition' in attribute and attribute['disposition'] == TypeDescriptorDisposition.Var.value + + @staticmethod + def is_any_array_kind(attribute_kind): + return attribute_kind in (AttributeKind.ARRAY, AttributeKind.VAR_ARRAY, AttributeKind.FILL_ARRAY) + + @staticmethod + def is_sorted_array(attribute): + return 'sort_key' in attribute + + @staticmethod + def is_reserved_field(attribute): + return 'name' in attribute and '_Reserved' in attribute['name'] and 'size' in attribute + + @staticmethod + def is_conditional_attribute(attribute): + return 'condition' in attribute + + @staticmethod + def is_attribute_count_size_field(attribute, class_attributes): + if class_attributes is None: + return False + attribute_name = attribute['name'] + 
is_size_of_class_attributes = list( + filter(lambda a: 'size' in a and a['size'] == attribute_name, class_attributes)) + return len(is_size_of_class_attributes) == 1 + + @staticmethod + def should_generate_class(name): + # subclassees may override this method if the language is not ready to generate all the classes + # I need to exclude due to the ReceiptBuilder hack of not serializing the size + # Also, SizePrefixedEntity needs to go first, not VerifiableEntity or EntityBody the way we handle super classes. + return name not in ('SizePrefixedEntity', 'VerifiableEntity', 'EntityBody', 'EmbeddedTransactionHeader', + 'TransactionHeader') + # return True + + @staticmethod + def should_use_super_class(): + # if true, first inline is super class, the rest are inline builders + # if false, there is no super class, all inline attributes use inline builders. + return True + + @staticmethod + def add_required_import(required_import: set, + import_type, + class_name, + base_class_name # pylint: disable=unused-argument + ): + if not import_type == class_name: + required_import.add(import_type) + return required_import + + @staticmethod + def get_all_constructor_params(attributes): + return [a for a in attributes if not a.kind == AttributeKind.SIZE_FIELD and a.attribute_name != 'size'] + + def get_generated_class_name(self, typename, class_schema, schema): + class_type = class_schema['type'] + default_name = typename + 'Dto' + if self.is_byte_type(class_type) or self.is_enum_type(class_type) or typename not in schema: + return default_name + return typename + 'Builder' if self.is_struct_type(schema[typename]['type']) else default_name + + def is_builtin_type(self, typename, size): + # byte up to long are passed as 'byte' with size set to proper value + return not isinstance(size, str) and self.is_byte_type(typename) and size <= 8 + + def get_attribute_size(self, schema, attribute): + if 'size' not in attribute and not self.is_byte_type(attribute['type']) and not 
self.is_enum_type( + attribute['type']): + attr = schema[attribute['type']] + if 'size' in attr: + return attr['size'] + return 1 + return attribute['size'] + + @staticmethod + def get_base_type(schema: dict, attribute_type): + attribute: dict = schema.get(attribute_type) + if attribute is not None: + return attribute.get('type') + return None + + @staticmethod + def is_flags_enum(attribute_type): + return attribute_type.endswith('Flags') + + @staticmethod + def is_inline_class(attribute): + return 'disposition' in attribute and attribute['disposition'] == TypeDescriptorDisposition.Inline.value + + @staticmethod + def capitalize_first_character(string): + return string if not string else string[0].upper() + string[1:] + + @staticmethod + def decapitalize_first_character(string): + return string if not string else string[0].lower() + string[1:] + + @staticmethod + def snake_case(string: str): + return string if not string else string[0] + ''.join('_' + x if x.isupper() else x for x in string[1:]) + + # pylint: disable=R0911 + def get_attribute_kind(self, attribute, class_attributes): + if self.is_var_array_type(attribute): + return AttributeKind.VAR_ARRAY + if self.is_fill_array_type(attribute): + return AttributeKind.FILL_ARRAY + if self.is_inline_class(attribute): + return AttributeKind.CUSTOM + if self.is_attribute_count_size_field(attribute, class_attributes): + return AttributeKind.SIZE_FIELD + + attribute_type = attribute['type'] + + if self.is_flags_enum(attribute_type): + return AttributeKind.FLAGS + + if self.is_struct_type(attribute_type) or self.is_enum_type(attribute_type) or 'size' not in attribute: + return AttributeKind.CUSTOM + + attribute_size = attribute['size'] + + if isinstance(attribute_size, str): + if attribute_type == 'byte': + return AttributeKind.BUFFER + return AttributeKind.ARRAY + + if isinstance(attribute_size, int) and not attribute_type == 'byte': + return AttributeKind.ARRAY + + if self.is_builtin_type(attribute_type, 
attribute_size): + return AttributeKind.SIMPLE + + return AttributeKind.BUFFER + + def get_attribute_property_equal(self, schema, attributes, attribute_name, attribute_value, recurse=True): + for attribute in attributes: + if attribute_name in attribute and attribute[attribute_name] == attribute_value: + return attribute + if (recurse and 'disposition' in attribute and + attribute['disposition'] == TypeDescriptorDisposition.Inline.value): + value = self.get_attribute_property_equal(schema, schema[attribute['type']]['layout'], attribute_name, + attribute_value) + if value is not None: + return value + return None + + def get_name_from_type(self, type_name: str): + return self.decapitalize_first_character(type_name) + + @staticmethod + def get_comment_from_name(name): + return name[0].upper() + ''.join(' ' + x.lower() if x.isupper() else x for x in name[1:]) + + def get_comments_from_attribute(self, attribute): + comment = attribute['comments'].strip() if 'comments' in attribute else '' + if not comment and 'name' in attribute: + comment = self.get_comment_from_name(attribute['name']) + return comment + + def create_enum_name(self, name: str): + return self.snake_case(name).upper() + + @abstractmethod + def get_builtin_type(self, size): + raise NotImplementedError('get_builtin_type must be overridden') + + @abstractmethod + def get_generated_type(self, schema, attribute, attribute_kind): + raise NotImplementedError('get_generated_type must be overridden') diff --git a/catbuffer-generators/generators/common/MakoClassGenerator.py b/catbuffer-generators/generators/common/MakoClassGenerator.py new file mode 100644 index 00000000..377b19e6 --- /dev/null +++ b/catbuffer-generators/generators/common/MakoClassGenerator.py @@ -0,0 +1,187 @@ +from collections import namedtuple +from itertools import chain +from typing import List + +from generators.common.Helper import TypeDescriptorDisposition +from .MakoStaticClassGenerator import MakoStaticClassGenerator + +AttributeData = 
namedtuple('AttributeData', + ['attribute', 'kind', 'attribute_name', 'attribute_comment', 'attribute_base_type', + 'attribute_var_type', 'attribute_is_final', 'attribute_class_name', 'attribute_is_super', + 'attribute_size', 'attribute_is_conditional', 'attribute_aggregate_attribute_name', + 'attribute_is_reserved', 'attribute_aggregate_class', 'attribute_is_inline', + 'attribute_is_aggregate', 'parent_attribute', 'condition_type_attribute', + 'attribute_condition_value', 'attribute_condition_provide', + 'conditional_read_before']) + + +class MakoClassGenerator(MakoStaticClassGenerator): + """ + Generic Mako generator for class type schemas. + """ + + def __init__(self, helper, name, schema, class_schema, template_path, file_extension): + super().__init__(template_path + 'Class.mako', + helper.get_generated_class_name(name, class_schema, schema) + file_extension, + helper, + schema, + class_schema) + class_schema['name'] = name[0].lower() + name[1:] + self.required_import = set() + self.name = name + self.attributes = [] + self.generated_class_name = helper.get_generated_class_name(name, class_schema, schema) + self.base_class_name = None + self.generated_base_class_name = None + if self.helper.should_use_super_class(): + self.foreach_attributes(self.class_schema['layout'], self._find_base_callback) + self.comments = helper.get_comments_from_attribute(self.class_schema) + self._recurse_foreach_attribute(self.name, self._add_attribute) + self.body_class_name = helper.get_body_class_name(self.name) + + condition_types = [(a, schema[a.condition_type_attribute['type']]) for a in self.attributes if + a.attribute_is_conditional and a.attribute['condition_operation'] != 'has'] + condition_types_values = self._calculate_constructor_options(condition_types) + + self.all_constructor_params = helper.get_all_constructor_params(self.attributes) + # not a.attribute_is_aggregate + self.constructor_attributes = [self.all_constructor_params] if not condition_types else [ + 
self.constructor_arguments(self.all_constructor_params, condition_type) for condition_type in + condition_types_values] + + @staticmethod + def _calculate_constructor_options(condition_types): + if not condition_types: + return [] + condition_types_values = [[(a, value['name']) + for value in schema_type['values']] for (a, schema_type) in condition_types] + condition_types_values = list(chain.from_iterable(condition_types_values)) + condition_types_values = [ + (a.condition_type_attribute['name'], a.attribute['condition_value'], value) for + (a, value) in condition_types_values] + + with_values = {a_condition_value for (a_condition_name, a_condition_value, conditional_value) in + condition_types_values} + + condition_types_values = {(a_condition_name, + conditional_value if conditional_value in with_values else None, + conditional_value) for + (a_condition_name, a_condition_value, conditional_value) in + condition_types_values} + return condition_types_values + + def _recurse_foreach_attribute(self, class_name: str, callback, aggregate_attribute=None, deep=0): + print(str('\t' * deep) + '- ' + class_name) + class_generated = (class_name != self.name and self.helper.should_generate_class(class_name)) + class_attributes = self.schema[class_name]['layout'] + for attribute in class_attributes: + if class_generated: + attribute['aggregate_class'] = class_name + if 'disposition' in attribute: + if attribute['disposition'] == TypeDescriptorDisposition.Inline.value: + attribute['name'] = self.helper.decapitalize_first_character(attribute['type']) + aggregate_class_is_generated = self.helper.should_generate_class(attribute['type']) + # Is the aggregate class generated? 
+ if aggregate_class_is_generated: + print(str('\t ' * (deep + 1)) + ' ' + attribute['name']) + callback(attribute, class_attributes, aggregate_attribute) + new_aggregate_attribute = attribute if aggregate_attribute is None and aggregate_class_is_generated \ + else aggregate_attribute + self._recurse_foreach_attribute(attribute['type'], self._add_attribute, + new_aggregate_attribute, + deep + 1) + elif attribute['disposition'] == TypeDescriptorDisposition.Const.value: + continue + elif self.helper.is_var_array_type(attribute) or self.helper.is_fill_array_type(attribute): + print(str('\t ' * (deep + 1)) + ' ' + attribute['name']) + callback(attribute, class_attributes, aggregate_attribute) + continue + else: + print(str('\t ' * (deep + 1)) + ' ' + attribute['name']) + callback(attribute, class_attributes, aggregate_attribute) + + def _add_attribute(self, attribute, class_attributes, aggregate_attribute): + aggregate_attribute_name = aggregate_attribute['name'] if aggregate_attribute else None + aggregate_attribute_type = aggregate_attribute['type'] if aggregate_attribute else None + kind = self.helper.get_attribute_kind(attribute, class_attributes) + attribute_is_conditional = self.helper.is_conditional_attribute(attribute) + attribute_comment = self.helper.get_comments_from_attribute(attribute) + attribute_name = attribute['name'] + attribute_size = self.helper.get_attribute_size(self.schema, attribute) + attribute_var_type = self.helper.get_generated_type(self.schema, attribute, kind) + attribute_is_final = attribute_name != 'size' and not attribute_is_conditional + attribute_type = attribute.get('type', None) + attribute_base_type = self.helper.get_base_type(self.schema, attribute_type) + attribute_class_name = self.helper.get_generated_class_name(attribute_type, attribute, self.schema) + attribute_aggregate_attribute_name = aggregate_attribute_name + attribute_is_aggregate = self.helper.is_inline_class(attribute) + attribute_is_super = self.base_class_name is 
not None and self.base_class_name == aggregate_attribute_type + if attribute_is_aggregate and self.base_class_name is not None: + attribute_is_super = attribute_type == self.base_class_name + attribute_is_reserved = self.helper.is_reserved_field(attribute) + attribute_is_inline = not attribute_is_super and aggregate_attribute_name is not None + attribute_aggregate_class = attribute.get('aggregate_class', None) + self.required_import = self.helper.add_required_import(self.required_import, + attribute_var_type, + self.generated_class_name, + self.generated_base_class_name) + if attribute_is_conditional: + condition_type_attribute = self.helper.get_attribute_property_equal(self.schema, + self.class_schema['layout'], 'name', + attribute['condition']) + else: + condition_type_attribute = None + + parent_attribute = self.helper.get_attribute_property_equal(self.schema, self.class_schema['layout'], 'size', + attribute_name) + conditional_read_before: bool = False + if 'condition' in attribute: + conditional_read_before = len( + [a1 for a1 in self.attributes if a1.attribute_name == attribute['condition']]) == 0 + + attribute_tuple = AttributeData(attribute, kind, attribute_name, + attribute_comment, attribute_base_type, attribute_var_type, + attribute_is_final, attribute_class_name, + attribute_is_super, attribute_size, attribute_is_conditional, + attribute_aggregate_attribute_name, attribute_is_reserved, + attribute_aggregate_class, + attribute_is_inline, attribute_is_aggregate, parent_attribute, + condition_type_attribute, None, True, conditional_read_before) + self.attributes.append(attribute_tuple) + + def _find_base_callback(self, attribute): + if self.helper.is_inline_class(attribute) and self.helper.should_generate_class(attribute['type']): + self.base_class_name = attribute['type'] + self.generated_base_class_name = self.helper.get_generated_class_name(self.base_class_name, + self.schema[self.base_class_name], + self.schema) + return True + return False + + def 
foreach_attributes(self, attributes, callback): + for attribute in attributes: + if callback(attribute): + break + + def constructor_arguments(self, constructor_params: List[AttributeData], condition_type_and_value): + return [self.set_default_argument(a, condition_type_and_value) for a in constructor_params] + + def set_default_argument(self, a: AttributeData, condition_type_and_value) -> AttributeData: + + (a_condition_name, a_condition_value, conditional_value) = condition_type_and_value + + if a.attribute_name == a_condition_name: + return a._replace(attribute_condition_value=conditional_value) + + if self.should_not_provide_argument(a, a_condition_name, a_condition_value): + return a._replace(attribute_condition_provide=False) + return a + + def should_not_provide_argument(self, a: AttributeData, a_condition, a_condition_value): + if 'condition' not in a.attribute: + return False + if a.attribute['condition'] != a_condition: + return False + if a.attribute['condition_value'] == a_condition_value: + return False + return True diff --git a/catbuffer-generators/generators/common/MakoEnumGenerator.py b/catbuffer-generators/generators/common/MakoEnumGenerator.py new file mode 100644 index 00000000..364c3371 --- /dev/null +++ b/catbuffer-generators/generators/common/MakoEnumGenerator.py @@ -0,0 +1,41 @@ +from generators.common.Helper import TypeDescriptorDisposition +from .MakoStaticClassGenerator import MakoStaticClassGenerator + + +class MakoEnumGenerator(MakoStaticClassGenerator): + """ + Generic Mako generator for enum type schemas. 
+ """ + + def __init__(self, helper, name: str, schema, class_schema, template_path: str, file_extension: str): + super().__init__(template_path + 'Enum.mako', + helper.get_generated_class_name(name, class_schema, schema) + file_extension, + helper, + schema, + class_schema) + self.name = name + self.enum_values = {} + self.size = self.class_schema['size'] + self.enum_type = helper.get_builtin_type(self.size) + self.generated_class_name = helper.get_generated_class_name(name, class_schema, schema) + self._add_enum_values(self.class_schema) + self.comments = helper.get_comments_from_attribute(self.class_schema) + self.is_flag = helper.is_flags_enum(self.name) + for type_descriptor, entity_schema in self.schema.items(): + if 'layout' in entity_schema: + for attribute in entity_schema['layout']: + if attribute.get('disposition', None) == TypeDescriptorDisposition.Const.value and attribute.get( + 'type', None) == self.name: + enum_name = type_descriptor + enum_comment = self.helper.get_comment_from_name(enum_name) + enum_value = attribute['value'] + self._add_enum_value(enum_name, enum_value, enum_comment) + + def _add_enum_values(self, enum_attribute): + enum_attribute_values = enum_attribute['values'] + for current_attribute in enum_attribute_values: + self._add_enum_value(current_attribute['name'], current_attribute['value'], + self.helper.get_comments_from_attribute(current_attribute)) + + def _add_enum_value(self, name, value, comments): + self.enum_values[self.helper.create_enum_name(name)] = [value, comments] diff --git a/catbuffer-generators/generators/common/MakoStaticClassGenerator.py b/catbuffer-generators/generators/common/MakoStaticClassGenerator.py new file mode 100644 index 00000000..efabe7fa --- /dev/null +++ b/catbuffer-generators/generators/common/MakoStaticClassGenerator.py @@ -0,0 +1,44 @@ +from inspect import getframeinfo, currentframe +from os.path import dirname, abspath, realpath, join + +from mako.template import Template + + +class 
MakoStaticClassGenerator: + """ + Generic Mako generator. + Note that the mako context has 2 main objects. + - "genertor" with this object keeping all the known state + - "helper" with the language helper methods. + """ + + def __init__(self, template_file_name, generated_file_name, helper, schema, class_schema): + self.template_file_name = template_file_name + self.generated_file_name = generated_file_name + self.class_output = [] + self.schema = schema + self.class_schema = class_schema + self.helper = helper + + def _get_full_file_name(self): + filename = getframeinfo(currentframe()).filename + path = dirname(realpath(abspath(filename))) + return join(path, self.template_file_name) + + def _read_file(self): + full_file_name = self._get_full_file_name() + fileTemplate = Template(filename=full_file_name) + self.class_output += [fileTemplate.render(generator=self, helper=self.helper)] + + def generate(self): + self._read_file() + return self.class_output + + def log_context(self): + description = '' + for key in filter(lambda a: not a.startswith('_'), dir(self)): + description = description + key + ' = \'' + str(getattr(self, key)) + '\'\n' + return description + + def get_generated_file_name(self): + return self.generated_file_name diff --git a/catbuffer-generators/generators/common/MakoTypeGenerator.py b/catbuffer-generators/generators/common/MakoTypeGenerator.py new file mode 100644 index 00000000..34b6cb07 --- /dev/null +++ b/catbuffer-generators/generators/common/MakoTypeGenerator.py @@ -0,0 +1,22 @@ +from generators.common.Helper import AttributeKind +from .MakoStaticClassGenerator import MakoStaticClassGenerator + + +class MakoTypeGenerator(MakoStaticClassGenerator): + """ + Generic Mako generator for atomic type schemas. 
+ """ + + def __init__(self, helper, name: str, schema, class_schema, template_path: str, file_extension: str): + super().__init__(template_path + 'Type.mako', + helper.get_generated_class_name(name, class_schema, schema) + file_extension, helper, schema, + class_schema) + class_schema['name'] = name[0].lower() + name[1:] + self.name = name + self.attribute_name = self.class_schema['name'] + self.size = self.class_schema['size'] + self.generated_class_name = helper.get_generated_class_name(name, class_schema, schema) + self.attribute_kind = helper.get_attribute_kind(self.class_schema, None) + self.attribute_type = helper.get_generated_type(self.schema, self.class_schema, self.attribute_kind) + self.comments = helper.get_comments_from_attribute(self.class_schema) + self.AttributeKind = AttributeKind diff --git a/catbuffer-generators/generators/cpp_builder/BuilderGenerator.py b/catbuffer-generators/generators/cpp_builder/BuilderGenerator.py new file mode 100644 index 00000000..3cc1362e --- /dev/null +++ b/catbuffer-generators/generators/cpp_builder/BuilderGenerator.py @@ -0,0 +1,43 @@ +# pylint: disable=too-few-public-methods +from generators.Descriptor import Descriptor +from .HeaderGenerator import HeaderGenerator +from .ImplementationGenerator import ImplementationGenerator + + +class BuilderGenerator: + """Cpp transaction builder generator, creates both header and implementation file""" + def __init__(self, schema, options): + self.schema = schema + self.options = options + self.current = None + self.generated_header = False + self.current_name = None + + def __iter__(self): + """Creates an iterator around this generator""" + self.current = iter(self.schema) + self.generated_header = False + return self + + def _iterate_until_next_transaction(self): + if self.generated_header: + return None + + name = next(self.current) + while name == 'Transaction' or name.startswith('Embedded') or not name.endswith('Transaction'): + name = next(self.current) + return name + + 
def singularize(string):
    """Convert a plural English identifier to its singular form.

    Applies the same three suffix rules, in the same precedence order:
    '...ies' -> '...y', '...es' -> drop 'es', '...s' -> drop 's';
    anything else is returned unchanged.
    """
    for suffix, replacement in (('ies', 'y'), ('es', ''), ('s', '')):
        if string.endswith(suffix):
            return string[:-len(suffix)] + replacement
    return string
def __init__(self, schema, options, name):
    """Create a generator for one transaction's C++ builder.

    schema: full parsed catbuffer schema (dict of type descriptors).
    options: generator options; only 'copyright' (path to a license
        header file) is read here.
    name: transaction schema name, e.g. 'TransferTransaction'.
    """
    super(CppGenerator, self).__init__()
    self.schema = schema
    self.code = []
    self.transaction_name = name
    # Replacement map applied by append() via str.format to every emitted line.
    self.replacements = {
        'TRANSACTION_NAME': self.transaction_name,
        'BUILDER_NAME': self.builder_name(),
        'COMMENT_NAME': self.written_name(),
        'COMMENT_NAME_A_OR_AN': 'an' if self.written_name().startswith(('a', 'e', 'i', 'o', 'u')) else 'a'
    }

    self.indent = 0
    # NOTE(review): indexing by transaction name assumes every transaction
    # appears in at least one hints yaml file — otherwise this raises KeyError.
    self.hints = CppGenerator._load_hints(['includes', 'namespaces', 'plugin', 'rewrites', 'setters'])[self.transaction_name]
    self.prepend_copyright(options['copyright'])
self.code = [line.strip() for line in header] + + def generate(self): + self._add_includes() + self._namespace_start() + self.indent = 1 + self._class_header() + self._setters() + self._builds() + self._privates() + self._class_footer() + self.indent = 0 + self._namespace_end() + + return self.code + + # region helpers + + def _get_namespace(self, typename): + namespace = self.hints['namespaces'].get(typename, '') if 'namespaces' in self.hints else '' + if namespace: + namespace += '::' + + return namespace + + def append(self, multiline_string, additional_replacements=None): + for line in re.split(r'\n', multiline_string): + # indent non-empty lines + if line: + replacements = {**self.replacements, **additional_replacements} if additional_replacements else self.replacements + self.code.append('\t' * self.indent + line.format(**replacements)) + else: + self.code.append('') + + def qualified_type(self, typename): + namespace = self._get_namespace(typename) + return namespace + typename + + @staticmethod + def _is_builtin_type(typename, size): + # uint8_t up to uint64_t are passed as 'byte' with size set to proper value + return 'byte' == typename and size <= 8 + + @staticmethod + def _builtin_type(size, signedness): + builtin_types = {1: 'int8_t', 2: 'int16_t', 4: 'int32_t', 8: 'int64_t'} + builtin_type = builtin_types[size] + return builtin_type if signedness == 'signed' else 'u' + builtin_type + + def param_type(self, typename, size, signedness): + if not isinstance(size, str) and size > 0 and self._is_builtin_type(typename, size): + return self._builtin_type(size, signedness) + + # if type is simple pass by value, otherwise pass by reference + type_descriptor = self.schema[typename] + qualified_typename = self.qualified_type(typename) + + if 'byte' == type_descriptor['type'] and type_descriptor['size'] <= 8: + return qualified_typename + + if 'enum' == type_descriptor['type']: + return qualified_typename + + return 'const {}&'.format(qualified_typename) + + def 
_get_schema_field(self, field_name): + return next(field for field in self.schema[self.transaction_body_name()]['layout'] if field['name'] == field_name) + + @staticmethod + def method_name(prefix, param_name): + return '{PREFIX}{CAPITALIZED_PARAM_NAME}'.format(PREFIX=prefix, CAPITALIZED_PARAM_NAME=capitalize(param_name)) + + @staticmethod + def full_method_name(prefix, typename, param_name): + method_name = CppGenerator.method_name(prefix, param_name) + return '{METHOD_NAME}({TYPE_NAME} {PARAM_NAME})'.format(METHOD_NAME=method_name, TYPE_NAME=typename, PARAM_NAME=param_name) + + # endregion + + # region generate sub-methods + + def _namespace_start(self): + self.append('namespace catapult {{ namespace builders {{') + self.append('') + + def _setters(self): + self._foreach_builder_field(self._generate_setter_proxy) + + def _privates(self): + self._foreach_builder_field(self._generate_field_proxy) + + def _namespace_end(self): + self.append('}}}}') + + # endregion + + # region internals + + def _foreach_builder_field(self, callback): + for field in self.schema[self.transaction_body_name()]['layout']: + # for builder fields, skip Size or count fields, they are always used for variable data + name = field['name'] + if name.endswith('Size') or name.endswith('Count') or '_Reserved' in name: + continue + + callback(field) + + def _get_simple_setter_name_desc(self, field): + """sample: void setRemoteAccountKey(const Key& remoteAccountKey)""" + param_type = self.param_type(field['type'], field.get('size', 0), field.get('signedness', '')) + param_name = field['name'] + return 'set', param_type, param_name + + @staticmethod + def _get_buffer_setter_name_desc(field): + """sample: void setMessage(const RawBuffer& message)""" + assert 'byte' == field['type'] + param_type = 'const RawBuffer&' + param_name = field['name'] + return 'set', param_type, param_name + + def _get_vector_setter_name_desc(self, field): + """sample: void addMosaic(const Mosaic& mosaic)""" + param_type = 
@staticmethod
def _get_field_kind(field):
    """Classify a schema field for builder generation.

    Returns:
        FieldKind.SIMPLE for scalar fields (no 'size' key, or a raw uint
            of at most 8 bytes),
        FieldKind.BUFFER for variable data sized by a '...Size' field,
        FieldKind.VECTOR for arrays sized by a '...Count' field,
        FieldKind.UNKNOWN otherwise.
    """
    if 'size' not in field:
        return FieldKind.SIMPLE

    # Numeric sizes: raw uint types (uint8_t..uint64_t) are SIMPLE; any other
    # fixed integer size is not classifiable by the suffix rules below.
    # Fix: the original fell through and called str.endswith on the int,
    # raising AttributeError for fixed-size byte arrays (e.g. size 32).
    if not isinstance(field['size'], str):
        if 'byte' == field['type'] and field['size'] <= 8:
            return FieldKind.SIMPLE
        return FieldKind.UNKNOWN

    if field['size'].endswith('Size'):
        return FieldKind.BUFFER

    if field['size'].endswith('Count'):
        return FieldKind.VECTOR

    return FieldKind.UNKNOWN
def _add_includes(self):
    """Emit the header preamble: pragma once, the TransactionBuilder base
    include and the transaction model include for the owning plugin
    (plugin directory comes from the 'plugin' hint)."""
    self.append('''#pragma once
#include "TransactionBuilder.h"
#include "plugins/txes/{PLUGIN}/src/model/{{TRANSACTION_NAME}}.h"'''.format(PLUGIN=self.hints['plugin']))

    # NOTE(review): the bare '#include ' literal looks truncated — presumably
    # '#include <vector>' with angle-bracket text lost in extraction; confirm
    # against the repository before relying on this output.
    if self._contains_any_field_kind(FieldKind.VECTOR):
        self.append('#include ')

    self.append('')
networkIdentifier, const Key& signer);') + self.append('') + + self.indent -= 1 + + @staticmethod + def _format_bound(field): + return ' and {} to `{}`'.format(field['condition'], field['condition_value']) + + def _add_comment(self, field_kind, field, param_name): + comments = { + FieldKind.SIMPLE: 'Sets the {COMMENT} to \\a {NAME}{BOUND}.', + FieldKind.BUFFER: 'Sets the {COMMENT} to \\a {NAME}.', + FieldKind.VECTOR: 'Adds \\a {NAME} to {COMMENT}.' + } + bound_msg = '' + if 'condition' in field: + bound_msg = HeaderGenerator._format_bound(field) + + comment_parts = field['comments'].split(' \\note ') + self.append('/// ' + comments[field_kind].format(COMMENT=comment_parts[0], NAME=param_name, BOUND=bound_msg)) + for comment_note in comment_parts[1:]: + self.append('/// \\note {0}.'.format(capitalize(comment_note))) + + def _generate_setter(self, field_kind, field, full_setter_name, param_name): + self._add_comment(field_kind, field, param_name) + self.append('void {};\n'.format(full_setter_name)) + + def _setters(self): + self.append('public:') + self.indent += 1 + super(HeaderGenerator, self)._setters() + self.indent -= 1 + + def _builds(self): + self.append('public:') + self.indent += 1 + self.append('''/// Gets the size of {COMMENT_NAME} transaction. +/// \\note This returns size of a normal transaction not embedded transaction. +size_t size() const; + +/// Builds a new {COMMENT_NAME} transaction. +std::unique_ptr build() const; + +/// Builds a new embedded {COMMENT_NAME} transaction. 
def _generate_call_to_setter_for_bound_field(self, condition_field_name, condition_value):
    """Build the C++ statement that pins the condition (discriminator) field's
    member to the enum value implied by a conditional setter.

    condition_field_name: name of the schema field acting as discriminator.
    condition_value: constant name to assign (caller capitalizes it).
    NOTE(review): param_type is assumed to be the (possibly namespace
    qualified) enum type name of the bound field — confirm for non-enum
    discriminators.
    """
    field = self._get_schema_field(condition_field_name)
    field_kind = CppGenerator._get_field_kind(field)
    _, param_type, param_name = self._get_setter_name_desc(field_kind, field)
    return 'm_{NAME} = {TYPE_NAME}::{VALUE};'.format(NAME=param_name, TYPE_NAME=param_type, VALUE=condition_value)
def _generate_setter(self, field_kind, field, full_setter_name, param_name): + self.append('void {BUILDER_NAME}::' + full_setter_name + ' {{') + self.indent += 1 + if field_kind == FieldKind.SIMPLE: + self.append('m_{NAME} = {NAME};'.format(NAME=param_name)) + if 'condition' in field: + call_line = self._generate_call_to_setter_for_bound_field(field['condition'], capitalize(field['condition_value'])) + self.append(call_line) + elif field_kind == FieldKind.BUFFER: + self.append('''if (0 == {NAME}.Size) +\tCATAPULT_THROW_INVALID_ARGUMENT("argument `{NAME}` cannot be empty"); + +if (!m_{NAME}.empty()) +\tCATAPULT_THROW_RUNTIME_ERROR("`{NAME}` field already set"); + +m_{NAME}.resize({NAME}.Size); +m_{NAME}.assign({NAME}.pData, {NAME}.pData + {NAME}.Size);'''.format(NAME=param_name)) + else: + if 'sort_key' in field: + format_string = 'InsertSorted(m_{FIELD}, {PARAM}, [](const auto& lhs, const auto& rhs) {{{{' + self.append(format_string.format(FIELD=field['name'], PARAM=param_name)) + self.indent += 1 + self.append('return lhs.{SORT_KEY} < rhs.{SORT_KEY};'.format(SORT_KEY=capitalize(field['sort_key']))) + self.indent -= 1 + self.append('}});') + else: + self.append('m_{FIELD}.push_back({PARAM});'.format(FIELD=field['name'], PARAM=param_name)) + self.indent -= 1 + self.append('}}\n') + + def _generate_field(self, field_kind, field, builder_field_typename): + pass + + def _generate_field_initializer_list_entry(self, field): + self.append(', m_{NAME}()'.format(NAME=field['name'])) + + def _generate_build_variable_fields_size(self, variable_sizes, field): + field_kind = CppGenerator._get_field_kind(field) + formatted_vector_size = 'm_{NAME}.size()'.format(NAME=field['name']) + if field_kind == FieldKind.BUFFER: + self.append('size += {};'.format(formatted_vector_size)) + elif field_kind == FieldKind.VECTOR: + qualified_typename = self.qualified_type(field['type']) + formatted_size = '{ARRAY_SIZE} * sizeof({TYPE})'.format(ARRAY_SIZE=formatted_vector_size, 
TYPE=qualified_typename) + self.append('size += {};'.format(formatted_size)) + + if field_kind != FieldKind.SIMPLE: + variable_sizes[field['size']] = formatted_vector_size + + def _generate_transaction_field_name(self, name): + field_name = capitalize(name) + rewritten = self.hints['rewrites'].get(field_name, '') if 'rewrites' in self.hints else '' + return rewritten or field_name + + def _generate_build_variable_fields(self, field): + field_kind = CppGenerator._get_field_kind(field) + if field_kind == FieldKind.SIMPLE: + return + + template = {'NAME': field['name'], 'TX_FIELD_NAME': self._generate_transaction_field_name(field['name'])} + if field_kind in (FieldKind.BUFFER, FieldKind.VECTOR): + self.append('std::copy(m_{NAME}.cbegin(), m_{NAME}.cend(), pTransaction->{TX_FIELD_NAME}Ptr());'.format(**template)) + + @staticmethod + def byte_size_to_type_name(size): + return {1: 'uint8_t', 2: 'uint16_t', 4: 'uint32_t', '8': 'uint64_t'}[size] + + def _generate_condition(self, condition_field_name, condition_value): + field = self._get_schema_field(condition_field_name) + field_kind = CppGenerator._get_field_kind(field) + _, param_type, _ = self._get_setter_name_desc(field_kind, field) + return 'if ({TYPE_NAME}::{VALUE} == m_{NAME})'.format(TYPE_NAME=param_type, VALUE=capitalize(condition_value), NAME=field['name']) + + def _generate_build(self, variable_sizes): + self.append('template') + self.append('std::unique_ptr {BUILDER_NAME}::buildImpl() const {{') + self.indent += 1 + + self.append('// 1. allocate, zero (header), set model::Transaction fields') + self.append('auto pTransaction = createTransaction(sizeImpl());') + self.append('') + + self.append('// 2. 
set fixed transaction fields') + + # set non-variadic fields + for field in self.schema[self.transaction_body_name()]['layout']: + template = {'NAME': field['name'], 'TX_FIELD_NAME': self._generate_transaction_field_name(field['name'])} + if field['name'].endswith('Size') or field['name'].endswith('Count'): + size = variable_sizes[field['name']] + size_type = ImplementationGenerator.byte_size_to_type_name(field['size']) + format_string = 'pTransaction->{TX_FIELD_NAME} = utils::checked_cast({SIZE});' + self.append(format_string.format(**template, SIZE_TYPE=size_type, SIZE=size)) + else: + field_kind = CppGenerator._get_field_kind(field) + if field_kind == FieldKind.SIMPLE: + if 'condition' in field: + condition = self._generate_condition(field['condition'], field['condition_value']) + self.append(condition) + self.indent += 1 + + # if setter has been suppressed, fill in with what is defined in setters.yaml hint file + setter = self.hints['setters'].get(field['name'], '') if 'setters' in self.hints else '' + if setter: + self.append('pTransaction->{TX_FIELD_NAME} = {SETTER};'.format(**template, SETTER=setter)) + elif '_Reserved' in field['name']: + self.append('pTransaction->{TX_FIELD_NAME} = 0;'.format(**template)) + else: + self.append('pTransaction->{TX_FIELD_NAME} = m_{NAME};'.format(**template)) + + if 'condition' in field: + self.indent -= 1 + self.append('') + + # variadic fields are defined at the end of schema, + # so break if loop reached any of them + else: + break + + self.append('') + + if self._contains_any_other_field_kind(FieldKind.SIMPLE): + self.append('// 3. 
def _generate_size(self):
    """Emit the sizeImpl() template method that computes the full
    transaction size, and collect the per-field size expressions.

    Returns a dict mapping each variable-size schema field's size-field
    name to the C++ expression for its element count (e.g.
    'm_mosaics.size()'), consumed later by _generate_build.
    NOTE(review): the bare 'template' line looks truncated — presumably
    'template<typename TransactionType>' lost in extraction; confirm.
    """
    self.append('template')
    self.append('size_t {BUILDER_NAME}::sizeImpl() const {{')
    self.indent += 1
    self.append('// calculate transaction size')
    self.append('auto size = sizeof(TransactionType);')

    # go through variable data and add it to size, collect sizes
    variable_sizes = {}
    self._foreach_builder_field(lambda field: self._generate_build_variable_fields_size(variable_sizes, field))

    self.append('return size;')
    self.indent -= 1
    self.append('}}\n')
    return variable_sizes
b/catbuffer-generators/generators/cpp_builder/hints/namespaces.yaml new file mode 100644 index 00000000..92d071bf --- /dev/null +++ b/catbuffer-generators/generators/cpp_builder/hints/namespaces.yaml @@ -0,0 +1,56 @@ +# some transactions use types that need to be namespace-qualified in c++ builders + +VotingKeyLinkTransaction: + LinkAction: model + +VrfKeyLinkTransaction: + LinkAction: model + +AddressAliasTransaction: + AliasAction: model + +MosaicAliasTransaction: + AliasAction: model + +AccountKeyLinkTransaction: + LinkAction: model + +NodeKeyLinkTransaction: + LinkAction: model + +HashLockTransaction: + UnresolvedMosaic: model + +MosaicDefinitionTransaction: + MosaicFlags: model + MosaicProperty: model + +MosaicSupplyChangeTransaction: + MosaicSupplyChangeAction: model + +SecretLockTransaction: + LockHashAlgorithm: model + UnresolvedMosaic: model + +SecretProofTransaction: + LockHashAlgorithm: model + +TransferTransaction: + Mosaic: model + UnresolvedMosaic: model + +NamespaceRegistrationTransaction: + NamespaceRegistrationType: model + +AccountAddressRestrictionTransaction: + AccountRestrictionFlags: model + +AccountMosaicRestrictionTransaction: + AccountRestrictionFlags: model + +AccountOperationRestrictionTransaction: + AccountRestrictionFlags: model + EntityType: model + +MosaicGlobalRestrictionTransaction: + MosaicRestrictionType: model diff --git a/catbuffer-generators/generators/cpp_builder/hints/plugin.yaml b/catbuffer-generators/generators/cpp_builder/hints/plugin.yaml new file mode 100644 index 00000000..84a41a1d --- /dev/null +++ b/catbuffer-generators/generators/cpp_builder/hints/plugin.yaml @@ -0,0 +1,37 @@ +# transaction plugin name needs to be known in order to produce correct include paths in c++ builders + +VotingKeyLinkTransaction: ../coresystem +VrfKeyLinkTransaction: ../coresystem + +AccountKeyLinkTransaction: account_link +NodeKeyLinkTransaction: account_link + +AggregateCompleteTransaction: aggregate +AggregateBondedTransaction: aggregate 
+ +HashLockTransaction: lock_hash + +SecretLockTransaction: lock_secret +SecretProofTransaction: lock_secret + +AccountMetadataTransaction: metadata +MosaicMetadataTransaction: metadata +NamespaceMetadataTransaction: metadata + +MosaicDefinitionTransaction: mosaic +MosaicSupplyChangeTransaction: mosaic + +MultisigAccountModificationTransaction: multisig + +AddressAliasTransaction: namespace +MosaicAliasTransaction: namespace +NamespaceRegistrationTransaction: namespace + +AccountAddressRestrictionTransaction: restriction_account +AccountMosaicRestrictionTransaction: restriction_account +AccountOperationRestrictionTransaction: restriction_account + +MosaicAddressRestrictionTransaction: restriction_mosaic +MosaicGlobalRestrictionTransaction: restriction_mosaic + +TransferTransaction: transfer diff --git a/catbuffer-generators/generators/cpp_builder/hints/rewrites.yaml b/catbuffer-generators/generators/cpp_builder/hints/rewrites.yaml new file mode 100644 index 00000000..ec44290e --- /dev/null +++ b/catbuffer-generators/generators/cpp_builder/hints/rewrites.yaml @@ -0,0 +1,3 @@ +# rewrite transaction field access + +placeholder: diff --git a/catbuffer-generators/generators/cpp_builder/hints/setters.yaml b/catbuffer-generators/generators/cpp_builder/hints/setters.yaml new file mode 100644 index 00000000..047b54bc --- /dev/null +++ b/catbuffer-generators/generators/cpp_builder/hints/setters.yaml @@ -0,0 +1,13 @@ +# 1. suppress generation of setters for field listed in cats file +# 2. 
class JavaFileGenerator(FileGenerator):
    """Java file generator: emits the generated catbuffer builder sources
    into package io.nem.symbol.catapult.builders using the Mako templates
    under ../java/templates/."""

    def init_code(self):
        """Prepend the Java package declaration (plus a blank line) to the
        code the base FileGenerator emits."""
        code = super().init_code()
        code += ['package io.nem.symbol.catapult.builders;'] + ['']
        return code

    def get_template_path(self):
        """Directory (relative to the generators tree) holding the Java Mako templates."""
        return '../java/templates/'

    def get_static_templates_file_names(self):
        """Hand-written support classes copied verbatim next to the generated code."""
        return ['BitMaskable', 'GeneratorUtils', 'AggregateTransactionBodyBuilder', 'TransactionBuilderHelper',
                'EmbeddedTransactionBuilderHelper',
                'Serializer']

    def get_main_file_extension(self):
        """File extension for generated sources."""
        return '.java'

    def create_helper(self):
        """Language helper exposing Java-specific naming/typing rules to templates."""
        return JavaHelper()
def get_write_method_name(self, size):
    """Pick the Java stream write method for a value of the given size.

    String sizes (variable-length), sizes above 8 and size 0 fall back to
    the raw byte-array 'write'; 1/2/4/8 map to the fixed-width writers.
    """
    if isinstance(size, str) or size > 8 or 0 == size:
        return 'write'
    fixed_writers = {1: 'writeByte', 2: 'writeShort', 4: 'writeInt', 8: 'writeLong'}
    return fixed_writers[size]
/**
 * Creates one vector test case entry.
 *
 * @param filename name of the YAML vector file the case was read from
 * @param builder simple class name of the generated builder under test
 * @param payload expected serialized payload as a hex string
 * @param comment optional human-readable description (may be null; a
 *     payload hash is shown instead when absent)
 */
public BuilderTestItem(String filename, String builder, String payload, String comment) {
    this.filename = filename;
    this.builder = builder;
    this.payload = payload;
    this.comment = comment;
}
hash(payload) : comment; + return filename + " - " + builder + " - " + commentSuffix; + } + + public static String hash(String stringToHash) { + try { + MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); + messageDigest.update(stringToHash.getBytes()); + return GeneratorUtils.toHex(messageDigest.digest()); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException(e); + } + } + } + + private static List vectors() throws Exception { + List walk = Files.walk(Paths.get(TEST_RESOURCES_VECTOR)).collect(Collectors.toList()); + try (Stream paths = walk.stream()) { + return paths + .filter(Files::isRegularFile).map(Path::toFile) + .flatMap(VectorTest::getVectorFromFile).collect(Collectors.toList()); + } + } + + private static Stream getVectorFromFile(File file) { + try { + InputStream input = new FileInputStream(file); + Yaml yaml = new Yaml(); + List> data = yaml.load(input); + return data.stream().map( + stringStringMap -> { + String payload = Objects.toString(stringStringMap.get("payload")); + return new BuilderTestItem(file.getName(), + stringStringMap.get("builder"), + payload, + stringStringMap.get("comment")); + }); + } catch (Exception e) { + throw new IllegalStateException(e); + } + + } + + @ParameterizedTest + @MethodSource("vectors") + public void serialization(BuilderTestItem item) { + try { + String className = this.getClass().getPackage().getName() + "." 
+ item.builder; + DataInputStream inputStream = new DataInputStream( + new ByteArrayInputStream(GeneratorUtils.hexToBytes(item.payload))); + Serializer serializer = (Serializer) Class.forName(className) + .getMethod("loadFromBinary", DataInputStream.class).invoke(null, + inputStream); + Assertions.assertEquals(item.payload.toUpperCase(), GeneratorUtils.toHex(serializer.serialize()).toUpperCase()); + } catch (RuntimeException | ClassNotFoundException | NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + Assertions + .fail("Cannot run test " + item + " Error: " + ExceptionUtils.readStackTrace(e)); + } + + } + +} diff --git a/catbuffer-generators/generators/java/build.gradle b/catbuffer-generators/generators/java/build.gradle new file mode 100644 index 00000000..18abcf89 --- /dev/null +++ b/catbuffer-generators/generators/java/build.gradle @@ -0,0 +1,125 @@ +plugins { + id 'java' + id 'signing' + id "de.marcphilipp.nexus-publish" version "0.4.0" + id 'io.codearte.nexus-staging' version '0.21.1' +} + +apply plugin: 'java' +apply plugin: 'maven' +apply plugin: 'signing' +apply plugin: "de.marcphilipp.nexus-publish" + +repositories { + mavenCentral() + mavenLocal() +} + +dependencies { + testImplementation 'org.bouncycastle:bcprov-jdk15on:1.58' + testImplementation group: 'org.yaml', name: 'snakeyaml', version: '1.27' + testImplementation(platform("org.junit:junit-bom:5.4.0")) + testImplementation("org.junit.jupiter:junit-jupiter:5.4.0") +} + +def getPropertyValue(key, defaultValue = null) { + def value = project.hasProperty(key) ? project[key] : System.getenv(key) + return value?.trim() == '' ? 
defaultValue : value +} + +def ossrhUsername = getPropertyValue('ossrhUsername'); +def ossrhPassword = getPropertyValue('ossrhPassword'); + +def signingSecretKeyRingFile = getPropertyValue('signingSecretKeyRingFile') +def signingKeyId = getPropertyValue('signingKeyId') +def signingPassword = getPropertyValue('signingPassword') + +def repoPath = getPropertyValue('TRAVIS_REPO_SLUG', "nemtech/catbuffer-generators") + +allprojects { + ext."signing.keyId" = signingKeyId + ext."signing.secretKeyRingFile" = signingSecretKeyRingFile + ext."signing.password" = signingPassword +} + +test { + useJUnitPlatform() +} + +sourceCompatibility = '1.8' +targetCompatibility = '1.8' +version = '#artifactVersion' +description = 'catapult generated lib' +group 'io.nem' + +task sourcesJar(type: Jar) { + classifier = 'sources' + from sourceSets.main.allSource +} + +task javadocJar(type: Jar) { + classifier = 'javadoc' + from javadoc +} + +artifacts { + archives javadocJar, sourcesJar +} + +signing { + required { signingKeyId } + sign publishing.publications +} + +publishing { + publications { + mavenJava(MavenPublication) { + from components.java + artifact sourcesJar + artifact javadocJar + pom { + url = "https://github.com/${repoPath}" + groupId = 'io.nem' + name = "${project.name}" + description = "Generated lib in charge of buffering catpult transactions." 
+ scm { + connection = "scm:git:https://github.com/${repoPath}.git" + developerConnection = "scm:svn:https://github.com/${repoPath}" + url = "https://github.com/${repoPath}" + } + licenses { + license { + name = 'The Apache License, Version 2.0' + url = 'http://www.apache.org/licenses/LICENSE-2.0.txt' + } + } + developers { + developer { + id = 'fboucquez' + name = 'Fernando Boucquez' + } + } + } + } + } +} + +if (ossrhUsername) { + nexusPublishing { + repositories { + sonatype { + username = ossrhUsername + password = ossrhPassword + } + } + connectTimeout = Duration.ofSeconds(360) + clientTimeout = Duration.ofSeconds(360) + } + + nexusStaging { + packageGroup = "io.nem" + stagingProfileId = "365f7aa90b3b13" + username = ossrhUsername + password = ossrhPassword + } +} diff --git a/catbuffer-generators/generators/java/settings.gradle b/catbuffer-generators/generators/java/settings.gradle new file mode 100644 index 00000000..5718f708 --- /dev/null +++ b/catbuffer-generators/generators/java/settings.gradle @@ -0,0 +1 @@ +rootProject.name = '#artifactName' diff --git a/catbuffer-generators/generators/java/templates/AggregateTransactionBodyBuilder.mako b/catbuffer-generators/generators/java/templates/AggregateTransactionBodyBuilder.mako new file mode 100644 index 00000000..d0eb34f7 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/AggregateTransactionBodyBuilder.mako @@ -0,0 +1,158 @@ +import java.io.DataInputStream; +import java.nio.ByteBuffer; +import java.util.List; +import java.io.ByteArrayInputStream; + +/** Binary layout for an aggregate transaction. */ +public final class AggregateTransactionBodyBuilder implements Serializer { + /** Aggregate hash of an aggregate's transactions. */ + private final Hash256Dto transactionsHash; + /** Reserved padding to align end of AggregateTransactionHeader on 8-byte boundary. 
*/ + private final int aggregateTransactionHeader_Reserved1; + /** Sub-transaction data (transactions are variable sized and payload size is in bytes). */ + private final List transactions; + /** Cosignatures data (fills remaining body space after transactions). */ + private final List cosignatures; + + /** + * Constructor - Creates an object from stream. + * + * @param stream Byte stream to use to serialize the object. + */ + protected AggregateTransactionBodyBuilder(final DataInputStream stream) { + try { + this.transactionsHash = Hash256Dto.loadFromBinary(stream); + final int payloadSize = Integer.reverseBytes(stream.readInt()); + this.aggregateTransactionHeader_Reserved1 = Integer.reverseBytes(stream.readInt()); + final ByteBuffer transactionBytes = ByteBuffer.allocate(payloadSize); + stream.read(transactionBytes.array()); + final DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(transactionBytes.array())); + this.transactions = new java.util.ArrayList<>(); + while (dataInputStream.available() > 0) { + EmbeddedTransactionBuilder embeddedTransactionBuilder = EmbeddedTransactionBuilderHelper.loadFromBinary(dataInputStream); + transactions.add(embeddedTransactionBuilder); + GeneratorUtils.skipPadding(embeddedTransactionBuilder.getSize(), dataInputStream); + } + this.cosignatures = new java.util.ArrayList<>(); + while (stream.available() > 0) { + cosignatures.add(CosignatureBuilder.loadFromBinary(stream)); + } + } catch(Exception e) { + throw GeneratorUtils.getExceptionToPropagate(e); + } + } + + /** + * Constructor. + * + * @param transactionsHash Aggregate hash of an aggregate's transactions. + * @param transactions Sub-transaction data (transactions are variable sized and payload size is in bytes). + * @param cosignatures Cosignatures data (fills remaining body space after transactions). 
+ */ + protected AggregateTransactionBodyBuilder(final Hash256Dto transactionsHash, final List transactions, final List cosignatures) { + GeneratorUtils.notNull(transactionsHash, "transactionsHash is null"); + GeneratorUtils.notNull(transactions, "transactions is null"); + GeneratorUtils.notNull(cosignatures, "cosignatures is null"); + this.transactionsHash = transactionsHash; + this.aggregateTransactionHeader_Reserved1 = 0; + this.transactions = transactions; + this.cosignatures = cosignatures; + } + + /** + * Creates an instance of AggregateTransactionBodyBuilder. + * + * @param transactionsHash Aggregate hash of an aggregate's transactions. + * @param transactions Sub-transaction data (transactions are variable sized and payload size is in bytes). + * @param cosignatures Cosignatures data (fills remaining body space after transactions). + * @return Instance of AggregateTransactionBodyBuilder. + */ + public static AggregateTransactionBodyBuilder create(final Hash256Dto transactionsHash, final List transactions, final List cosignatures) { + return new AggregateTransactionBodyBuilder(transactionsHash, transactions, cosignatures); + } + + /** + * Gets aggregate hash of an aggregate's transactions. + * + * @return Aggregate hash of an aggregate's transactions. + */ + public Hash256Dto getTransactionsHash() { + return this.transactionsHash; + } + + /** + * Gets reserved padding to align end of AggregateTransactionHeader on 8-byte boundary. + * + * @return Reserved padding to align end of AggregateTransactionHeader on 8-byte boundary. + */ + private int getAggregateTransactionHeader_Reserved1() { + return this.aggregateTransactionHeader_Reserved1; + } + + /** + * Gets sub-transaction data (transactions are variable sized and payload size is in bytes). + * + * @return Sub-transaction data (transactions are variable sized and payload size is in bytes). 
+ */ + public List getTransactions() { + return this.transactions; + } + + /** + * Gets cosignatures data (fills remaining body space after transactions). + * + * @return Cosignatures data (fills remaining body space after transactions). + */ + public List getCosignatures() { + return this.cosignatures; + } + + /** + * Gets the size of the object. + * + * @return Size in bytes. + */ + public int getSize() { + int size = 0; + size += this.transactionsHash.getSize(); + size += 4; // payloadSize + size += 4; // aggregateTransactionHeader_Reserved1 + size += this.transactions.stream().mapToInt(o -> o.getSize() + GeneratorUtils.getPadding(o.getSize())).sum(); + size += this.cosignatures.stream().mapToInt(o -> o.getSize()).sum(); + return size; + } + + /** + * Creates an instance of AggregateTransactionBodyBuilder from a stream. + * + * @param stream Byte stream to use to serialize the object. + * @return Instance of AggregateTransactionBodyBuilder. + */ + public static AggregateTransactionBodyBuilder loadFromBinary(final DataInputStream stream) { + return new AggregateTransactionBodyBuilder(stream); + } + + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. 
+ */ + public byte[] serialize() { + return GeneratorUtils.serialize(dataOutputStream -> { + final byte[] transactionsHashBytes = this.transactionsHash.serialize(); + dataOutputStream.write(transactionsHashBytes, 0, transactionsHashBytes.length); + int transactionsSize = (int) this.transactions.stream().mapToInt(o -> o.getSize() + GeneratorUtils.getPadding(o.getSize())).sum(); + dataOutputStream.writeInt(Integer.reverseBytes(transactionsSize)); + dataOutputStream.writeInt(Integer.reverseBytes(this.getAggregateTransactionHeader_Reserved1())); + for (int i = 0; i < this.transactions.size(); i++) { + final byte[] transactionsBytes = this.transactions.get(i).serialize(); + dataOutputStream.write(transactionsBytes, 0, transactionsBytes.length); + GeneratorUtils.addPadding(transactionsBytes.length, dataOutputStream); + } + for (int i = 0; i < this.cosignatures.size(); i++) { + final byte[] cosignaturesBytes = this.cosignatures.get(i).serialize(); + dataOutputStream.write(cosignaturesBytes, 0, cosignaturesBytes.length); + } + }); + } +} diff --git a/catbuffer-generators/generators/java/templates/BitMaskable.mako b/catbuffer-generators/generators/java/templates/BitMaskable.mako new file mode 100644 index 00000000..ee5b831d --- /dev/null +++ b/catbuffer-generators/generators/java/templates/BitMaskable.mako @@ -0,0 +1,10 @@ +/** Interface for the flags-based Enum. */ +public interface BitMaskable { + + /** + * Gets the value of the enum. + * + * @return Value of the enum. 
+ */ + long getValueAsLong(); +} diff --git a/catbuffer-generators/generators/java/templates/Class.mako b/catbuffer-generators/generators/java/templates/Class.mako new file mode 100644 index 00000000..c5dbf5b4 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/Class.mako @@ -0,0 +1,264 @@ +import java.io.DataInputStream; +import java.nio.ByteBuffer; +import java.util.EnumSet; +import java.util.List; + +/** +* ${helper.capitalize_first_character(generator.comments)} +**/ +public class ${generator.generated_class_name}${(' extends ' + str(generator.generated_base_class_name)) if generator.generated_base_class_name is not None else ''} implements Serializer { + +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and not a.kind == helper.AttributeKind.SIZE_FIELD]: + /** ${helper.capitalize_first_character(a.attribute_comment)}. **/ + private ${'final ' if a.attribute_is_final else ''}${a.attribute_var_type} ${a.attribute_name}; + +% endfor\ + +<%def name="renderCondition(a)" filter="trim"> + ${helper.get_condition_operation_text(a.attribute['condition_operation']).format(a.attribute['condition'], helper.get_generated_class_name(a.condition_type_attribute['type'], a.condition_type_attribute, generator.schema) + '.' + helper.create_enum_name(a.attribute['condition_value']))} +\ +## STREAM CONSTRUCTORS +<%def name="renderReader(a)" filter="trim"> + % if a.kind == helper.AttributeKind.SIMPLE: + this.${a.attribute_name} = ${helper.get_reverse_method_name(a.attribute_size).format('stream.' + helper.get_read_method_name(a.attribute_size) + '()')}; + % elif a.kind == helper.AttributeKind.BUFFER: + this.${a.attribute_name} = GeneratorUtils.readByteBuffer(stream, ${a.attribute_size}); + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + final ${a.attribute_var_type} ${a.attribute_name} = ${helper.get_reverse_method_name(a.attribute_size).format('stream.' 
+ helper.get_read_method_name(a.attribute_size) + '()')}; + % elif a.kind == helper.AttributeKind.ARRAY: + this.${a.attribute_name} = GeneratorUtils.loadFromBinaryArray(${helper.get_load_from_binary_factory(a.attribute_class_name)}::loadFromBinary, stream, ${a.attribute_size}); + % elif a.kind == helper.AttributeKind.CUSTOM and (not a.attribute_is_conditional or not a.conditional_read_before): + this.${a.attribute_name} = ${helper.get_load_from_binary_factory(a.attribute_class_name)}.loadFromBinary(stream); + % elif a.kind == helper.AttributeKind.CUSTOM: + this.${a.attribute_name} = new ${helper.get_load_from_binary_factory(a.attribute_class_name)}(${a.attribute['condition']}Condition); + % elif a.kind == helper.AttributeKind.FILL_ARRAY: + this.${a.attribute_name} = GeneratorUtils.loadFromBinaryArray(${helper.get_load_from_binary_factory(a.attribute_class_name)}::loadFromBinary, stream, ${a.attribute_size}); + % elif a.kind == helper.AttributeKind.FLAGS: + this.${a.attribute_name} = ${'GeneratorUtils.toSet({0}, {1})'.format(a.attribute_class_name + '.class', helper.get_reverse_method_name(a.attribute_size).format('stream.' + helper.get_read_method_name(a.attribute_size) + '()'))}; + % elif a.kind == helper.AttributeKind.VAR_ARRAY: + this.${a.attribute_name} = GeneratorUtils.loadFromBinaryArrayRemaining(TransactionBuilderFactory::createEmbeddedTransactionBuilder, stream, payloadSize); + % else: + FIX ME! + % endif +\ + /** + * Constructor - Creates an object from stream. + * + * @param stream Byte stream to use to serialize the object. 
+ */ + protected ${generator.generated_class_name}(DataInputStream stream) { + % if generator.base_class_name is not None: + super(stream); +% endif + try { + % for a in set([(a.attribute['condition'], a.attribute_size, a.conditional_read_before) for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and a.conditional_read_before and a.attribute_is_conditional]): + final ${helper.get_builtin_type(a[1])} ${a[0]}Condition = ${helper.get_reverse_method_name(a[1]).format('stream.' + helper.get_read_method_name(a[1]) + '()')}; + % endfor + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and not a.conditional_read_before]: + %if a.attribute_is_conditional: + if (this.${renderCondition(a) | trim}) { + ${renderReader(a) | trim} + } + % else: + ${renderReader(a) | trim} + %endif + % endfor + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and a.conditional_read_before]: + if (this.${renderCondition(a) | trim}) { + ${renderReader(a) | trim} + } + % endfor + } catch (Exception e) { + throw GeneratorUtils.getExceptionToPropagate(e); + } + } + + /** + * Creates an instance of ${generator.generated_class_name} from a stream. + * + * @param stream Byte stream to use to serialize the object. + * @return Instance of ${generator.generated_class_name}. 
+ */ + public static ${generator.generated_class_name} loadFromBinary(DataInputStream stream) { + return new ${generator.generated_class_name}(stream); + } + <% + constructor_params = generator.all_constructor_params + constructor_params_CSV = ', '.join([str(a.attribute_var_type) + ' ' + str(a.attribute_name) for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + super_arguments_CSV = ', '.join([str(a.attribute_name) for a in constructor_params if a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_is_aggregate and not a.attribute_name == 'size']) + %> + /** + * Constructor. + * +% for a in [a for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.kind == helper.AttributeKind.SIZE_FIELD and not a.attribute_name == 'size']: + * @param ${a.attribute_name} ${helper.capitalize_first_character(a.attribute_comment)}. 
+% endfor + */ + protected ${generator.generated_class_name}(${constructor_params_CSV}) { + % if generator.base_class_name is not None: + super(${super_arguments_CSV}); + % endif + % for a in [a for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']: + % if a.attribute_is_conditional: + if (${renderCondition(a) | trim}) { + GeneratorUtils.notNull(${a.attribute_name}, "${a.attribute_name} is null"); + } + %else: + GeneratorUtils.notNull(${a.attribute_name}, "${a.attribute_name} is null"); + % endif + % endfor + % for a in [a for a in constructor_params if not a.attribute_is_inline and not a.attribute_is_super and not a.attribute_name == 'size']: + % if a.attribute_is_aggregate: + this.${a.attribute_name} = new ${a.attribute_var_type}(${', '.join([str(inline.attribute_name) for inline in constructor_params if inline.attribute_aggregate_attribute_name == a.attribute_name and not inline.attribute_is_reserved and not inline.kind == helper.AttributeKind.SIZE_FIELD and inline.attribute_condition_value is None and not inline.attribute_is_aggregate])}); + % else: + this.${a.attribute_name} = ${a.attribute_name if not a.attribute_is_reserved else '0'}; + % endif + % endfor + } +## CONDITIONAL CONSTRUCTORS +% for possible_constructor_params in generator.constructor_attributes: + <% + constructor_params = [a for a in possible_constructor_params if a.attribute_condition_value is None and a.attribute_condition_provide and not a.attribute_is_reserved and not a.attribute_is_aggregate] + constructor_params_CSV = ', '.join([str(a.attribute_var_type) + ' ' + str(a.attribute_name) for a in constructor_params]) + default_value_attributes = [a for a in possible_constructor_params if a.attribute_condition_value is not None] + create_name_suffix = ''.join([helper.capitalize_first_character(a.attribute_condition_value) for a in default_value_attributes]) + 
constructor_arguments_CSV = ', '.join([str(a.attribute_name) + if a.attribute_condition_value is not None or a.attribute_condition_provide else 'null' + for a in possible_constructor_params if not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + %> + /** + * Creates an instance of ${generator.generated_class_name}. + * +% for a in [a for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']: + * @param ${a.attribute_name} ${helper.capitalize_first_character(a.attribute_comment)}. +% endfor + * @return Instance of ${generator.generated_class_name}. + */ + public static ${generator.generated_class_name} create${create_name_suffix}(${constructor_params_CSV}) { + % for a in default_value_attributes: + ${helper.get_generated_class_name(a.attribute['type'], a.attribute, generator.schema)} ${a.attribute_name} = ${helper.get_generated_class_name(a.attribute['type'], a.attribute, generator.schema)}.${helper.create_enum_name(a.attribute_condition_value)}; + % endfor + return new ${generator.generated_class_name}(${constructor_arguments_CSV}); + } +% endfor + +## GETTERS: +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_aggregate and not a.kind == helper.AttributeKind.SIZE_FIELD and (not a.attribute_is_reserved or not a.attribute_is_inline)]: + /** + * Gets ${a.attribute_comment}. + * + * @return ${helper.capitalize_first_character(a.attribute_comment)}. 
+ */ + ${'private' if a.attribute_is_reserved else 'public'} ${a.attribute_var_type} get${helper.capitalize_first_character(a.attribute_name) if a.attribute_name != 'size' else 'StreamSize'}() { + % if a.attribute_is_conditional and not a.attribute_is_inline: + if (!(this.${renderCondition(a) | trim})) { + throw new java.lang.IllegalStateException("${a.attribute['condition']} is not set to ${helper.create_enum_name(a.attribute['condition_value'])}."); + } + % endif + % if a.attribute_is_inline: + return this.${a.attribute_aggregate_attribute_name}.get${helper.capitalize_first_character(a.attribute_name)}(); + % else: + return this.${a.attribute_name}; + % endif + } + +% endfor +## SIZE: +<%def name="renderSize(a)" filter="trim">\ + % if a.kind == helper.AttributeKind.SIMPLE: + size += ${a.attribute_size}; // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + size += ${a.attribute_size}; // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.BUFFER: + size += this.${a.attribute_name}.array().length; + % elif a.kind == helper.AttributeKind.ARRAY or a.kind == helper.AttributeKind.VAR_ARRAY or a.kind == helper.AttributeKind.FILL_ARRAY: + size += this.${a.attribute_name}.stream().mapToInt(o -> o.getSize()).sum(); + % elif a.kind == helper.AttributeKind.FLAGS: + size += ${a.attribute_class_name}.values()[0].getSize(); + % else: + size += this.${a.attribute_name}.getSize(); + % endif +\ + + /** + * Gets the size of the object. + * + * @return Size in bytes. 
+ */ + public int getSize() { + int size = ${'super.getSize()' if generator.base_class_name is not None else '0'}; +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline]: + % if a.attribute_is_conditional: + if (this.${renderCondition(a) | trim}) { + ${renderSize(a).strip()} + } + % else: + ${renderSize(a).strip()} + % endif +% endfor + return size; + } + +% if generator.base_class_name in ['Transaction', 'EmbeddedTransaction']: + /** + * Gets the body builder of the object. + * + * @return Body builder. + */ + @Override + public ${generator.body_class_name}Builder getBody() { + return this.${helper.decapitalize_first_character(generator.body_class_name)}; + } +% endif + +% if generator.name in ['Transaction', 'EmbeddedTransaction']: + /** + * Gets the body builder of the object. + * + * @return Body builder. + */ + public Serializer getBody() { + return null; + } +% endif + +<%def name="renderSerialize(a)" filter="trim">\ + % if a.kind == helper.AttributeKind.SIMPLE and (generator.name != 'Receipt' or a.attribute_name != 'size'): + dataOutputStream.${helper.get_write_method_name(a.attribute_size)}(${helper.get_reverse_method_name(a.attribute_size).format('('+ a.attribute_var_type +') this.get' + helper.capitalize_first_character(a.attribute_name) + '()')}); + % elif a.kind == helper.AttributeKind.BUFFER: + dataOutputStream.write(this.${a.attribute_name}.array(), 0, this.${a.attribute_name}.array().length); + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + dataOutputStream.${helper.get_write_method_name(a.attribute_size)}(${helper.get_reverse_method_name(a.attribute_size).format('('+ a.attribute_var_type +') GeneratorUtils.getSize(this.get' + helper.capitalize_first_character(a.parent_attribute['name']) + '())')}); + % elif a.kind == helper.AttributeKind.ARRAY or a.kind == helper.AttributeKind.VAR_ARRAY or a.kind == helper.AttributeKind.FILL_ARRAY: + GeneratorUtils.writeList(dataOutputStream, 
this.${a.attribute_name}); + % elif a.kind == helper.AttributeKind.CUSTOM: + GeneratorUtils.writeEntity(dataOutputStream, this.${a.attribute_name}); + % elif a.kind == helper.AttributeKind.FLAGS: + dataOutputStream.${helper.get_write_method_name(a.attribute_size)}(${helper.get_reverse_method_name(a.attribute_size).format('(' + helper.get_builtin_type(a.attribute_size) + ') GeneratorUtils.toLong(' + a.attribute_class_name + '.class, this.' + a.attribute_name + ')')}); + % else: + // Ignored serialization: ${a.attribute_name} ${a.kind} + % endif +\ + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. + */ + public byte[] serialize() { + return GeneratorUtils.serialize((dataOutputStream) -> { + % if generator.base_class_name is not None: + final byte[] superBytes = super.serialize(); + dataOutputStream.write(superBytes, 0, superBytes.length); +% endif + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline]: + % if a.attribute_is_conditional: + if (this.${renderCondition(a) | trim}) { + ${renderSerialize(a)} + } + % else: + ${renderSerialize(a)} + % endif + % endfor + }); + } +} diff --git a/catbuffer-generators/generators/java/templates/EmbeddedTransactionBuilderHelper.mako b/catbuffer-generators/generators/java/templates/EmbeddedTransactionBuilderHelper.mako new file mode 100644 index 00000000..96ce91e6 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/EmbeddedTransactionBuilderHelper.mako @@ -0,0 +1,36 @@ +import java.io.DataInputStream; +import java.io.SequenceInputStream; +import java.io.ByteArrayInputStream; + +/** Factory in charge of creating the right transaction builder from the streamed data. */ +public class EmbeddedTransactionBuilderHelper { + + /** + * It creates the rigth embbeded transaction builder from the stream data. 
+ * + * @param stream the stream + * @return the EmbeddedTransactionBuilder subclass + */ + public static EmbeddedTransactionBuilder loadFromBinary(final DataInputStream stream) { + + EmbeddedTransactionBuilder headerBuilder = EmbeddedTransactionBuilder.loadFromBinary(stream); +% for name in generator.schema: +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) + entityTypeVersion = next(iter([x for x in layout if x.get('name','') == 'version']),{}).get('value',0) +%>\ + %if (entityTypeValue > 0 and 'Aggregate' not in name and 'Block' not in name and name.startswith('Embedded')): + if (headerBuilder.getType() == EntityTypeDto.${helper.create_enum_name(name[8:])} && headerBuilder.getVersion() == ${entityTypeVersion}) { + ${name[8:]}BodyBuilder bodyBuilder = ${name[8:]}BodyBuilder.loadFromBinary(stream); + SequenceInputStream concatenate = new SequenceInputStream( + new ByteArrayInputStream(headerBuilder.serialize()), + new ByteArrayInputStream(bodyBuilder.serialize())); + return ${name}Builder.loadFromBinary(new DataInputStream(concatenate)); + } + %endif +% endfor + return headerBuilder; + } + +} diff --git a/catbuffer-generators/generators/java/templates/Enum.mako b/catbuffer-generators/generators/java/templates/Enum.mako new file mode 100644 index 00000000..66b638ea --- /dev/null +++ b/catbuffer-generators/generators/java/templates/Enum.mako @@ -0,0 +1,94 @@ +import java.io.DataInputStream; + +/** +* ${helper.capitalize_first_character(generator.comments)} +**/ +public enum ${generator.generated_class_name} implements ${'BitMaskable, ' if generator.is_flag else '' }Serializer { + +% for i, (name, (value, comment)) in enumerate(generator.enum_values.items()): + /** ${comment}. */ + ${name}((${generator.enum_type}) ${value})${';' if i == len(generator.enum_values) -1 else ','} + +% endfor + + /** Enum value. 
*/ + private final ${generator.enum_type} value; + + /** + * Constructor. + * + * @param value Enum value. + */ + ${generator.generated_class_name}(final ${generator.enum_type} value) { + this.value = value; + } + + /** + * Gets enum value. + * + * @param value Raw value of the enum. + * @return Enum value. + */ + public static ${generator.generated_class_name} rawValueOf(final ${generator.enum_type} value) { + for (${generator.generated_class_name} current : ${generator.generated_class_name}.values()) { + if (value == current.value) { + return current; + } + } + throw new IllegalArgumentException(value + " was not a backing value for ${generator.generated_class_name}."); + } + + /** + * Gets the size of the object. + * + * @return Size in bytes. + */ + public int getSize() { + return ${generator.size}; + } + + /** + * Gets the value of the enum. + * + * @return Value of the enum. + */ + public ${generator.enum_type} getValue() { + return this.value; + } +% if generator.is_flag: + /** + * Gets the value of the enum. + * + * @return Value of the enum. + */ + public long getValueAsLong() { + return ${helper.get_to_unsigned_method_name(generator.size).format('this.value')}; + } + +% endif + /** + * Creates an instance of ${generator.generated_class_name} from a stream. + * + * @param stream Byte stream to use to serialize the object. + * @return Instance of ${generator.generated_class_name}. + */ + public static ${generator.generated_class_name} loadFromBinary(final DataInputStream stream) { + try { + final ${generator.enum_type} streamValue = ${helper.get_reverse_method_name(generator.size).format('stream.' + generator.helper.get_read_method_name(generator.size) + '()')}; + return rawValueOf(streamValue); + } catch(Exception e) { + throw GeneratorUtils.getExceptionToPropagate(e); + } + } + + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. 
+ */ + public byte[] serialize() { + return GeneratorUtils.serialize(dataOutputStream -> { + dataOutputStream.${helper.get_write_method_name(generator.size)}(${helper.get_reverse_method_name(generator.size).format('this.value')}); + }); + } +} diff --git a/catbuffer-generators/generators/java/templates/GeneratorUtils.mako b/catbuffer-generators/generators/java/templates/GeneratorUtils.mako new file mode 100644 index 00000000..8b5b9ef3 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/GeneratorUtils.mako @@ -0,0 +1,432 @@ +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.EnumSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; + +/** + * Generator utility class. + */ +public final class GeneratorUtils { + + /** + * Constructor. + */ + private GeneratorUtils() { + } + + /** + * Throws if the object is null. + * + * @param object Object to to check. + * @param message Format string message. + * @param values Format values. + * @param Type of object. + */ + public static void notNull(T object, String message, Object... values) { + if (object == null) { + throw new NullPointerException(String.format(message, values)); + } + } + + /** + * Throws if the value is not true. + * + * @param expression Expression to check. + * @param message Format string message. + * @param values Format values. + */ + public static void isTrue(boolean expression, String message, Object... values) { + if (!expression) { + throw new IllegalArgumentException(String.format(message, values)); + } + } + + /** + * Throws if the value is not false. + * + * @param expression Expression to check. 
+ * @param message Format string message. + * @param values Format values. + */ + public static void isFalse(boolean expression, String message, Object... values) { + isTrue(!expression, message, values); + } + + /** + * Converts to an int by an unsigned conversion. + * + * @param value Signed byte. + * @return Positive integer. + */ + public static int toUnsignedInt(final byte value) { + return Byte.toUnsignedInt(value); + } + + /** + * Converts to an int by an unsigned conversion. + * + * @param value Signed short. + * @return Positive integer. + */ + public static int toUnsignedInt(final short value) { + return Short.toUnsignedInt(value); + } + + /** + * Creates a bitwise representation for an Set. + * + * @param enumClass Enum type. + * @param enumSet EnumSet to convert to bit representation. + * @param Type of enum. + * @return Long value of the EnumSet. + */ + public static & BitMaskable> long toLong(final Class enumClass, + final Set enumSet) { + final T[] enumValues = enumClass.getEnumConstants(); + isFalse(enumValues.length > Long.SIZE, + "The number of enum constants is greater than " + Long.SIZE); + long result = 0; + for (final T value : enumValues) { + if (enumSet.contains(value)) { + result += value.getValueAsLong(); + } + } + return result; + } + + /** + * Creates a EnumSet from from a bit representation. + * + * @param enumClass Enum class. + * @param bitMaskValue Bitmask value. + * @param Enum type. + * @return EnumSet representing the long value. + */ + public static & BitMaskable> EnumSet toSet(final Class enumClass, + final long bitMaskValue) { + final EnumSet results = EnumSet.noneOf(enumClass); + for (final T constant : enumClass.getEnumConstants()) { + if (0 != (constant.getValueAsLong() & bitMaskValue)) { + results.add(constant); + } + } + return results; + } + + /** + * Gets a runtime exception to propagates from an exception. + * + * @param exception Exception to propagate. 
+ * @param wrap Function that wraps an exception in a runtime exception. + * @param Specific exception type. + * @return RuntimeException to throw. + */ + public static RuntimeException getExceptionToPropagate( + final Exception exception, + final Function wrap) { + if ((exception instanceof ExecutionException) && (RuntimeException.class + .isAssignableFrom(exception.getCause().getClass()))) { + return (RuntimeException) exception.getCause(); + } + if (exception instanceof RuntimeException) { + return (RuntimeException) exception; + } + if (exception instanceof InterruptedException) { + Thread.currentThread().interrupt(); + return new IllegalStateException(exception); + } + return wrap.apply(exception); + } + + /** + * Gets a runtime exception to propagates from an exception. + * + * @param exception Exception to propagate. + * @param Specific exception type. + * @return RuntimeException to throw. + */ + public static RuntimeException getExceptionToPropagate( + final Exception exception) { + return getExceptionToPropagate(exception, RuntimeException::new); + } + + /** + * Propagates checked exceptions as a specific runtime exception. + * + * @param callable Function to call. + * @param wrap Function that wraps an exception in a runtime exception. + * @param Return type. + * @param Specific exception type. + * @return Function result. + */ + public static T propagate(final Callable callable, + final Function wrap) { + try { + return callable.call(); + } catch (final Exception e) { + throw getExceptionToPropagate(e, wrap); + } + } + + /** + * Propagates checked exceptions as a runtime exception. + * + * @param callable Function to call. + * @param Function return type. + * @return Function result. + */ + public static T propagate(final Callable callable) { + return propagate(callable, RuntimeException::new); + } + + + + /** + * Throwing consumer interface. + * + * @param Input type. + * @param Exception that is thrown. 
+ */ + public interface ThrowingConsumer { + + /** + * Performs operation on the given argument. + * + * @param t Input argument. + * @throws E Exception that is thrown. + */ + void accept(T t) throws E; + } + + /** + * Serializes data using a helper function to write to the stream. + * + * @param consumer Helper function that writes data to DataOutputStream. + * @return Byte array of data written. + */ + public static byte[] serialize(ThrowingConsumer consumer) { + return propagate(() -> { + try (final ByteArrayOutputStream byteArrayStream = new ByteArrayOutputStream(); + final DataOutputStream dataOutputStream = new DataOutputStream(byteArrayStream)) { + consumer.accept(dataOutputStream); + return byteArrayStream.toByteArray(); + } + }); + } + + /** + * It moves the output stream pointer the padding size calculated from the payload size + * + * @param size the payload size used to calcualted the padding + * @param dataInputStream the input stream that will be moved the calcauted padding size + */ + public static void skipPadding(int size, + final DataInputStream dataInputStream) { + GeneratorUtils.propagate(() -> { + int padding = getPadding(size); + dataInputStream.skipBytes(padding); + return null; + }); + } + + /** + * This method writes 0 into the dataOutputStream. The amount of 0s is the calculated padding size from provided + * payload size. + * + * @param size the payload size used to calcualted the padding + * @param dataOutputStream used to write the 0s. + */ + public static void addPadding(int size, final DataOutputStream dataOutputStream) { + GeneratorUtils.propagate(() -> { + int padding = getPadding(size); + while (padding > 0) { + dataOutputStream.write(0); + padding--; + } + return null; + }); + } + + /** + * It calcualtes the padding that needs to be added/skipped when processing inner transactions. + * + * @param size the size of the payload using to calculate the padding + * @return the padding to be added/skipped. 
+ */ + public static int getPadding(int size) { + int alignment = 8; + return 0 == size % alignment ? 0 : alignment - (size % alignment); + } + + /** + * It reads count elements from the stream and creates a list using the builder + * + * @param builder the builder + * @param stream the stream + * @param count the elements to be read + * @param the the type to be returned + * @return a list of T. + */ + public static List loadFromBinaryArray(final Function builder, + final DataInputStream stream, final long count) { + List list = new java.util.ArrayList<>(); + for (int i = 0; i < count; i++) { + list.add(builder.apply(stream)); + } + return list; + } + + + /** + * It reads all the remaining entities using the total payload size. + * + * @param builder the entity builder + * @param stream the stream to read from + * @param payloadSize the payload size + * @param the type of the entity + * @return a list of entities + * @throws IOException when data cannot be loaded. + */ + public static List loadFromBinaryArrayRemaining( + final Function builder, DataInputStream stream, int payloadSize) + throws IOException { + final ByteBuffer byteCount = ByteBuffer.allocate(payloadSize); + stream.read(byteCount.array()); + final DataInputStream dataInputStream = new DataInputStream( + new ByteArrayInputStream(byteCount.array())); + List entities = new java.util.ArrayList<>(); + while (dataInputStream.available() > 0) { + T entity = builder.apply(dataInputStream); + entities.add(entity); + GeneratorUtils.skipPadding(entity.getSize(), dataInputStream); + } + return entities; + } + + /** + * Write a list of catbuffer entities into the writer. + * + * @param dataOutputStream the stream to serialize into + * @param entities the entities to be serialized + * @throws IOException when data cannot be written. 
+ */ + public static void writeList(final DataOutputStream dataOutputStream, + final List entities) throws IOException { + for (Serializer entity : entities) { + final byte[] entityBytes = entity.serialize(); + dataOutputStream.write(entityBytes, 0, entityBytes.length); + } + } + + /** + * Write a serializer into the writer. + * + * @param dataOutputStream the stream to serialize into + * @param entity the entities to be serialized + * @throws IOException when data cannot be written. + */ + public static void writeEntity(final DataOutputStream dataOutputStream, final Serializer entity) + throws IOException { + final byte[] entityBytes = entity.serialize(); + dataOutputStream.write(entityBytes, 0, entityBytes.length); + } + + /** + * Read a {@link ByteBuffer} of the given size form the strem + * + * @param stream the stream + * @param size the size of the buffer to read + * @return the buffer + * @throws IOException when data cannot be read + */ + public static ByteBuffer readByteBuffer(final DataInputStream stream, final int size) throws IOException { + ByteBuffer buffer = ByteBuffer.allocate(size); + stream.readFully(buffer.array()); + return buffer; + } + + /** + * Returns the size of the buffer. + * + * @param buffer the buffer + * @return its size + */ + public static int getSize(final ByteBuffer buffer) { + return buffer.array().length; + } + + /** + * Returns the size of the collection + * @param collection the collecion + * @return the size. + */ + public static int getSize(final Collection collection) { + return collection.size(); + } + + private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); + + /** + * Basic to hex function that converts a byte array to an hex + * @param bytes the bytes + * @return the hex representation. 
+ */ + public static String toHex(byte[] bytes) { + char[] hexChars = new char[bytes.length * 2]; + for (int j = 0; j < bytes.length; j++) { + int v = bytes[j] & 0xFF; + hexChars[j * 2] = HEX_ARRAY[v >>> 4]; + hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F]; + } + return new String(hexChars); + } + + /** + * Basic from hex to byte array function. + * @param hex the hex string + * @return the byte array. + */ + public static byte[] hexToBytes(String hex) { + int len = hex.length(); + byte[] data = new byte[len / 2]; + for (int i = 0; i < len; i += 2) { + data[i / 2] = (byte) ((Character.digit(hex.charAt(i), 16) << 4) + + Character.digit(hex.charAt(i + 1), 16)); + } + return data; + } + + /** + * It writes the builder into a file for future unit testing. + * @param the type of the builder. + * @param builder the builder. + * @param file the file to append. + * @return the builder + */ + public static T writeBuilderToFile(T builder, String file) { + try (FileWriter writer = new FileWriter(new File(file), true)) { + String payload = toHex(builder.serialize()); + String builderName = builder.getClass().getSimpleName(); + writer.write("- builder: " + builderName + "\n"); + writer.write(" payload: " + payload + "\n"); + return builder; + } catch (Exception e) { + throw new IllegalArgumentException(e); + } + } +} + + + + diff --git a/catbuffer-generators/generators/java/templates/Serializer.mako b/catbuffer-generators/generators/java/templates/Serializer.mako new file mode 100644 index 00000000..b4657097 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/Serializer.mako @@ -0,0 +1,17 @@ +/** Objects of this interface knows how to serialize a catbuffer object. */ +public interface Serializer { + + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. + */ + byte[] serialize(); + + /** + * Gets the size of the object. + * + * @return Size in bytes. 
+ */ + int getSize(); +} diff --git a/catbuffer-generators/generators/java/templates/TransactionBuilderHelper.mako b/catbuffer-generators/generators/java/templates/TransactionBuilderHelper.mako new file mode 100644 index 00000000..c6d55c33 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/TransactionBuilderHelper.mako @@ -0,0 +1,44 @@ +import java.io.DataInputStream; +import java.io.SequenceInputStream; +import java.io.ByteArrayInputStream; + +/** Factory in charge of creating the right transaction builder from the streamed data. */ +public class TransactionBuilderHelper { + + /** + * It creates the right transaction builder from the stream data. + * + * @param stream the stream + * @return the TransactionBuilder subclass + */ + public static TransactionBuilder loadFromBinary(final DataInputStream stream) { + + TransactionBuilder headerBuilder = TransactionBuilder.loadFromBinary(stream); +% for name in generator.schema: +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) + entityTypeVersion = next(iter([x for x in layout if x.get('name','') == 'version']),{}).get('value',0) +%>\ + %if (entityTypeValue > 0 and 'Aggregate' not in name and 'Block' not in name and not name.startswith('Embedded')): + if (headerBuilder.getType() == EntityTypeDto.${helper.create_enum_name(name)} && headerBuilder.getVersion() == ${entityTypeVersion}) { + ${name}BodyBuilder bodyBuilder = ${name}BodyBuilder.loadFromBinary(stream); + SequenceInputStream concatenate = new SequenceInputStream( + new ByteArrayInputStream(headerBuilder.serialize()), + new ByteArrayInputStream(bodyBuilder.serialize())); + return ${name}Builder.loadFromBinary(new DataInputStream(concatenate)); + } + %elif (entityTypeValue > 0 and 'Block' not in name and not name.startswith('Embedded')): + if (headerBuilder.getType() == EntityTypeDto.${helper.create_enum_name(name)} && 
headerBuilder.getVersion() == ${entityTypeVersion}) { + AggregateTransactionBodyBuilder bodyBuilder = AggregateTransactionBodyBuilder.loadFromBinary(stream); + SequenceInputStream concatenate = new SequenceInputStream( + new ByteArrayInputStream(headerBuilder.serialize()), + new ByteArrayInputStream(bodyBuilder.serialize())); + return ${name}Builder.loadFromBinary(new DataInputStream(concatenate)); + } + %endif +% endfor + return headerBuilder; + } + +} diff --git a/catbuffer-generators/generators/java/templates/Type.mako b/catbuffer-generators/generators/java/templates/Type.mako new file mode 100644 index 00000000..ef3f2a50 --- /dev/null +++ b/catbuffer-generators/generators/java/templates/Type.mako @@ -0,0 +1,77 @@ +import java.io.DataInputStream; +import java.nio.ByteBuffer; + +/** ${generator.comments}. */ +public final class ${generator.generated_class_name} implements Serializer { + /** ${generator.comments}. */ + private final ${generator.attribute_type} ${generator.attribute_name}; + + /** + * Constructor. + * + * @param ${generator.attribute_name} ${generator.comments}. + */ + public ${generator.generated_class_name}(final ${generator.attribute_type} ${generator.attribute_name}) { + this.${generator.attribute_name} = ${generator.attribute_name}; + } + + /** + * Constructor - Creates an object from stream. + * + * @param stream Byte stream to use to serialize. + */ + public ${generator.generated_class_name}(final DataInputStream stream) { + try { +% if generator.attribute_kind == helper.AttributeKind.BUFFER: + this.${generator.attribute_name} = GeneratorUtils.readByteBuffer(stream, ${generator.size}); +% else: + this.${generator.attribute_name} = ${helper.get_reverse_method_name(generator.size).format('stream.' + generator.helper.get_read_method_name(generator.size) + '()')}; +% endif + } catch(Exception e) { + throw GeneratorUtils.getExceptionToPropagate(e); + } + } + + /** + * Gets ${generator.comments}. + * + * @return ${generator.comments}. 
+ */ + public ${generator.attribute_type} get${generator.name}() { + return this.${generator.attribute_name}; + } + + /** + * Gets the size of the object. + * + * @return Size in bytes. + */ + public int getSize() { + return ${generator.size}; + } + + /** + * Creates an instance of ${generator.generated_class_name} from a stream. + * + * @param stream Byte stream to use to serialize the object. + * @return Instance of ${generator.generated_class_name}. + */ + public static ${generator.generated_class_name} loadFromBinary(final DataInputStream stream) { + return new ${generator.generated_class_name}(stream); + } + + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. + */ + public byte[] serialize() { + return GeneratorUtils.serialize(dataOutputStream -> { +% if generator.attribute_kind == helper.AttributeKind.BUFFER: + dataOutputStream.write(this.${generator.attribute_name}.array(), 0, this.${generator.attribute_name}.array().length); +% else: + dataOutputStream.${helper.get_write_method_name(generator.size)}(${helper.get_reverse_method_name(generator.size).format('this.get' + generator.name + '()')}); +% endif + }); + } +} diff --git a/catbuffer-generators/generators/javascript/.eslintrc b/catbuffer-generators/generators/javascript/.eslintrc new file mode 100644 index 00000000..ffc92826 --- /dev/null +++ b/catbuffer-generators/generators/javascript/.eslintrc @@ -0,0 +1,104 @@ +--- +env: + es6: true +extends: airbnb +parserOptions: + sourceType: module +plugins: + - sort-requires + +rules: + indent: + - error + - tab + linebreak-style: + - error + - unix + quotes: + - error + - single + semi: + - error + - always + yoda: + - error + - always + curly: + - error + - multi-or-nest + - consistent + max-len: + - error + - code: 140 + + no-tabs: + - off + no-bitwise: + - off + no-plusplus: + - off + no-mixed-operators: + - error + - allowSamePrecedence: true + no-param-reassign: + - error + - props: false + no-underscore-dangle: + - error + - allow: 
+ - _id # mongodb identifier + + camelcase: + - off # for consts, e.g. Foo_Bar + comma-dangle: + - error + - never + default-case: + - off + + arrow-parens: + - error + - as-needed + func-names: + - error + - never + func-style: + - error + - expression + wrap-iife: + - error + - inside + + prefer-destructuring: + - error + - object: true + array: false + + valid-jsdoc: + - error + - requireReturn: false + prefer: + arg: param + argument: param + class: constructor + return: returns + preferType: + Boolean: boolean + Number: number + Object: object + String: string + + import/extensions: + - error + - never + import/no-absolute-path: + - error + import/no-unresolved: + - 2 + import/no-deprecated: + - error + import/named: + - error + + sort-requires/sort-requires: + - 2 diff --git a/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptBlockGenerator.py b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptBlockGenerator.py new file mode 100644 index 00000000..d73781c9 --- /dev/null +++ b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptBlockGenerator.py @@ -0,0 +1,47 @@ +from enum import Enum + +from .JavaScriptGeneratorUtils import indent + + +class BlockType(Enum): + NONE = 0 + # pylint: disable=invalid-name + IF = 1 + ELSE = 2 + ELIF = 3 + FOR = 4 + + +class JavaScriptBlockGenerator: + def __init__(self): + self.type = BlockType.NONE + self.rule = '' + self.instructions = [] + self.iterator = None + + def wrap(self, block_type, rule, iterator=None): + self.type = block_type + self.rule = rule + self.iterator = iterator + + def add_instructions(self, instructions): + self.instructions += instructions + + def add_block(self, block): + self.add_instructions(block.get_instructions()) + + def get_instructions(self): + if self.type is not BlockType.NONE: + if self.type is BlockType.IF: + return ['if ({0}) {{'.format(self.rule)] + indent(self.instructions) + ['}'] + if self.type is BlockType.ELIF: + return 
['else if ({0}) {{'.format(self.rule)] + indent(self.instructions) + ['}'] + if self.type is BlockType.ELSE: + return ['else {'] + indent(self.instructions) + ['}'] + if self.type is BlockType.FOR: + return [ + 'const {0}'.format(self.iterator), + 'for ({0} = 0; {0} {1}; {0}++) {{'.format(self.iterator, self.rule) + ] + indent(self.instructions) + ['}'] + + return self.instructions diff --git a/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptClassGenerator.py b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptClassGenerator.py new file mode 100644 index 00000000..133cce5a --- /dev/null +++ b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptClassGenerator.py @@ -0,0 +1,56 @@ +from .JavaScriptFunctionGenerator import FunctionType, JavaScriptFunctionGenerator +from .JavaScriptGeneratorUtils import indent + + +class JavaScriptClassGenerator: + @staticmethod + def get_generated_class_name(name): + return '{}Buffer'.format(name) + + @staticmethod + def get_generated_getter_name(attribute): + return 'get{}'.format(attribute.capitalize()) + + @staticmethod + def get_generated_setter_name(attribute): + return 'set{}'.format(attribute.capitalize()) + + def __init__(self, name): + self.class_name = JavaScriptClassGenerator.get_generated_class_name(name) + self.class_header = ['class {} {{'.format(self.class_name)] + self.functions = [] + + def add_constructor(self, initial_values, params): + new_constructor = JavaScriptFunctionGenerator(FunctionType.CONSTRUCTOR) + new_constructor.set_params(params) + + for attribute, value in initial_values.items(): + new_constructor.add_instructions(['this.{} = {}'.format(attribute, value)]) + + self.functions.append(new_constructor) + + def _add_getter(self, attribute): + new_getter = JavaScriptFunctionGenerator() + new_getter.set_name(JavaScriptClassGenerator.get_generated_getter_name(attribute)) + new_getter.add_instructions(['return this.{0}'.format(attribute)]) + 
self.add_function(new_getter) + + def _add_setter(self, attribute): + new_setter = JavaScriptFunctionGenerator() + new_setter.set_name(JavaScriptClassGenerator.get_generated_setter_name(attribute)) + new_setter.set_params([attribute]) + new_setter.add_instructions(['this.{0} = {0}'.format(attribute)]) + self.add_function(new_setter) + + def add_getter_setter(self, attribute): + self._add_getter(attribute) + self._add_setter(attribute) + + def add_function(self, function): + self.functions.append(function) + + def get_instructions(self): + functions = [] + for function in self.functions: + functions += function.get_instructions() + return self.class_header + indent(functions) + ['}'] diff --git a/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptFunctionGenerator.py b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptFunctionGenerator.py new file mode 100644 index 00000000..17c63a87 --- /dev/null +++ b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptFunctionGenerator.py @@ -0,0 +1,42 @@ +from enum import Enum + +from .JavaScriptGeneratorUtils import indent + + +class FunctionType(Enum): + FUNCTION = 0 + ARROW_FUNCTION = 1 + CONSTRUCTOR = 2 + STATIC = 3 + + +class JavaScriptFunctionGenerator: + def __init__(self, function_type=FunctionType.FUNCTION): + self.type = function_type + self.name = None + self.params = [] + self.instructions = [] + + def set_name(self, name): + self.name = name + + def set_params(self, params): + self.params = params + + def _get_header(self): + if self.type is FunctionType.ARROW_FUNCTION: + return ['{} = ({}) => {{'.format(self.name, ', '.join(self.params))] + if self.type is FunctionType.STATIC: + return ['static {}({}) {{'.format(self.name, ', '.join(self.params))] + if self.type is FunctionType.CONSTRUCTOR: + return ['constructor({}) {{'.format(', '.join(self.params))] + return ['{}({}) {{'.format(self.name, ', '.join(self.params))] + + def add_instructions(self, 
instructions): + self.instructions += instructions + + def add_block(self, block): + self.instructions += block.get_instructions() + + def get_instructions(self): + return self._get_header() + indent(self.instructions) + ['}'] diff --git a/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptGeneratorUtils.py b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptGeneratorUtils.py new file mode 100644 index 00000000..e6e3e1fc --- /dev/null +++ b/catbuffer-generators/generators/javascript/javascript_builder/JavaScriptGeneratorUtils.py @@ -0,0 +1,2 @@ +def indent(instructions, n_indents=1): + return [' ' * 4 * n_indents + instruction for instruction in instructions] diff --git a/catbuffer-generators/generators/javascript/package.json b/catbuffer-generators/generators/javascript/package.json new file mode 100644 index 00000000..bf513ce0 --- /dev/null +++ b/catbuffer-generators/generators/javascript/package.json @@ -0,0 +1,21 @@ +{ + "name": "catbuffer-generated", + "version": "0.0.0", + "description": "", + "main": "", + "scripts": {}, + "keywords": [], + "author": "", + "license": "ISC", + "devDependencies": { + "chai": "^4.1.2", + "eslint": "^6.0.1", + "eslint-config-airbnb": "^17.1.1", + "eslint-plugin-import": "^2.18.0", + "eslint-plugin-jsx-a11y": "^6.2.3", + "eslint-plugin-react": "^7.14.2", + "eslint-plugin-sort-requires": "^2.1.0", + "mocha": "^6.1.4" + }, + "dependencies": {} +} diff --git a/catbuffer-generators/generators/javascript/support/JavaScriptUtils.js b/catbuffer-generators/generators/javascript/support/JavaScriptUtils.js new file mode 100644 index 00000000..1346c8c0 --- /dev/null +++ b/catbuffer-generators/generators/javascript/support/JavaScriptUtils.js @@ -0,0 +1,73 @@ +const bufferToUint = buffer => { + const dataView = new DataView(buffer.buffer); + if (1 === buffer.byteLength) + return dataView.getUint8(0, true); + + else if (2 === buffer.byteLength) + return dataView.getUint16(0, true); + + else if (4 === 
buffer.byteLength) + return dataView.getUint32(0, true); + + throw new Error('Unexpected buffer size'); +}; + +const concatTypedArrays = (array1, array2) => { + const newArray = new Uint8Array(array1.length + array2.length); + newArray.set(array1); + newArray.set(array2, array1.length); + return newArray; +}; + +const fitByteArray = (array, size) => { + if (array.length > size) { + throw new RangeError('Data size larger than allowed'); + } else if (array.length < size) { + const newArray = new Uint8Array(size); + newArray.fill(0); + newArray.set(array, size - array.length); + return newArray; + } + return array; +}; + +class Uint8ArrayConsumableBuffer { + constructor(binary) { + this.offset = 0; + this.binary = binary; + } + getBytes(count) { + if (count + this.offset > this.binary.length) + throw new RangeError(); + + const bytes = this.binary.slice(this.offset, this.offset + count); + this.offset += count; + return bytes; + } +} + +const uintToBuffer = (uintValue, bufferSize) => { + const buffer = new ArrayBuffer(bufferSize); + const dataView = new DataView(buffer); + if (1 === bufferSize) + dataView.setUint8(0, uintValue, true); + + else if (2 === bufferSize) + dataView.setUint16(0, uintValue, true); + + else if (4 === bufferSize) + dataView.setUint32(0, uintValue, true); + + else + throw new Error('Unexpected bufferSize'); + + return new Uint8Array(buffer); +}; + +module.exports = { + bufferToUint, + concatTypedArrays, + fitByteArray, + Uint8ArrayConsumableBuffer, + uintToBuffer +}; diff --git a/catbuffer-generators/generators/javascript/test/.eslintrc b/catbuffer-generators/generators/javascript/test/.eslintrc new file mode 100644 index 00000000..a7e14651 --- /dev/null +++ b/catbuffer-generators/generators/javascript/test/.eslintrc @@ -0,0 +1,7 @@ +--- +env: + mocha: true +rules: + import/no-extraneous-dependencies: + - error + - devDependencies: true diff --git a/catbuffer-generators/generators/javascript/test/support/JavaScriptUtils_spec.js 
b/catbuffer-generators/generators/javascript/test/support/JavaScriptUtils_spec.js new file mode 100644 index 00000000..15fbb936 --- /dev/null +++ b/catbuffer-generators/generators/javascript/test/support/JavaScriptUtils_spec.js @@ -0,0 +1,223 @@ +const assert = require('assert'); +const JavaScriptUtils = require('../../support/JavaScriptUtils.js'); +const { expect } = require('chai'); + + +describe('bufferToUint function', () => { + it('converts a 1 byte unsigned integer', () => { + // Arrange: + const int8 = 232; + const array8 = new ArrayBuffer(1); + const array8DataView = new DataView(array8); + + // Act: + array8DataView.setUint8(0, int8, true); + const value = JavaScriptUtils.bufferToUint(new Uint8Array(array8)); + array8DataView.setUint8(0, 0, true); + const value2 = JavaScriptUtils.bufferToUint(new Uint8Array(array8)); + + // Assert: + expect(value).to.equal(int8); + expect(value2).to.equal(0); + }); + + it('converts a 2 byte unsigned integer', () => { + // Arrange: + const int16 = 54345; + const array16 = new ArrayBuffer(2); + const array16DataView = new DataView(array16); + + // Act: + array16DataView.setUint16(0, int16, true); + const value = JavaScriptUtils.bufferToUint(new Uint8Array(array16)); + array16DataView.setUint16(0, 0, true); + const value2 = JavaScriptUtils.bufferToUint(new Uint8Array(array16)); + + // Assert: + expect(value).to.equal(int16); + expect(value2).to.equal(0); + }); + + it('converts a 4 byte unsigned integer', () => { + // Arrange: + const int32 = 765435; + const array32 = new ArrayBuffer(4); + const array32DataView = new DataView(array32); + + // Act: + array32DataView.setUint32(0, int32, true); + const value = JavaScriptUtils.bufferToUint(new Uint8Array(array32)); + array32DataView.setUint32(0, 0, true); + const value2 = JavaScriptUtils.bufferToUint(new Uint8Array(array32)); + + // Assert: + expect(value).to.equal(int32); + expect(value2).to.equal(0); + }); + + it('throws an exception when buffer size is unsupported', () => { + 
// Assert: + [0, 3, 8, 16].forEach(size => expect(() => JavaScriptUtils.bufferToUint(new Uint8Array(size))).to.throw('Unexpected buffer size')); + }); +}); + +describe('concatTypedArrays function', () => { + it('returns an empty array for two empty array inputs', () => { + // Arrange: + const array1 = new Uint8Array([]); + const array2 = new Uint8Array([]); + + // Act: + const result = JavaScriptUtils.concatTypedArrays(array1, array2); + + // Assert: + expect(Array.from(result)).to.deep.equal([]); + }); + + it('returns original array if one of the params is empty', () => { + // Arrange: + const array1 = new Uint8Array([]); + const array2 = new Uint8Array([23, 54]); + + // Act: + const result = JavaScriptUtils.concatTypedArrays(array1, array2); + const result2 = JavaScriptUtils.concatTypedArrays(array2, array1); + + // Assert: + expect(Array.from(result)).to.deep.equal([23, 54]); + expect(Array.from(result2)).to.deep.equal([23, 54]); + }); + + it('returns the ordered concatenation of both params', () => { + // Arrange: + const array1 = new Uint8Array([23, 54]); + const array2 = new Uint8Array([34, 2, 77, 91, 12]); + + // Act: + const result = JavaScriptUtils.concatTypedArrays(array1, array2); + const result2 = JavaScriptUtils.concatTypedArrays(array2, array1); + + // Assert: + expect(Array.from(result)).to.deep.equal([23, 54, 34, 2, 77, 91, 12]); + expect(Array.from(result2)).to.deep.equal([34, 2, 77, 91, 12, 23, 54]); + }); +}); + +describe('fitByteArray function', () => { + it('throws if data provided is larger than the requested output size', () => { + // Assert: + assert.throws(() => JavaScriptUtils.fitByteArray(new Uint8Array([34, 2, 77, 91, 12]), 2), RangeError); + }); + + it('returns the same array if the parameter size is the same as the parameter array length', () => { + // Arrange: + const array1 = new Uint8Array([]); + const array2 = new Uint8Array([34, 2, 77, 91, 12]); + + // Assert: + expect(JavaScriptUtils.fitByteArray(array1, 
array1.length)).to.deep.equal(array1); + expect(JavaScriptUtils.fitByteArray(array2, array2.length)).to.deep.equal(array2); + }); + + it('returns an array of the provided size initialized with trailing 0s if the input array was smaller', () => { + // Arrange: + const array1 = new Uint8Array([4]); + const array2 = new Uint8Array([34, 2, 77, 91, 12]); + + // Assert: + expect(Array.from(JavaScriptUtils.fitByteArray(array1, 2))).to.deep.equal([0].concat(Array.from(array1))); + expect(Array.from(JavaScriptUtils.fitByteArray(array2, 10))).to.deep.equal([0, 0, 0, 0, 0].concat(Array.from(array2))); + }); +}); + +describe('Uint8ArrayConsumableBuffer class', () => { + it('constructs with buffer offset to 0 and stored array', () => { + // Arrange: + const array = new Uint8Array([34, 2, 77, 91, 12]); + + // Act: + const consumableBuffer = new JavaScriptUtils.Uint8ArrayConsumableBuffer(array); + + // Assert: + expect(consumableBuffer.offset).to.equal(0); + expect(consumableBuffer.binary).to.equal(array); + }); + + it('getBytes method throws if requesting more bytes than available with offset 0', () => { + // Arrange: + const array1 = new Uint8Array([]); + const array2 = new Uint8Array([34, 2, 77, 91, 12]); + const consumableBuffer1 = new JavaScriptUtils.Uint8ArrayConsumableBuffer(array1); + const consumableBuffer2 = new JavaScriptUtils.Uint8ArrayConsumableBuffer(array2); + + // Assert: + expect(() => consumableBuffer1.getBytes(array1.length + 1)).to.throw(); + expect(() => consumableBuffer2.getBytes(array2.length + 1)).to.throw(); + }); + + it('getBytes method throws if requesting more bytes than available when offset is not 0', () => { + // Arrange: + const array = new Uint8Array([34, 2, 77, 91, 12]); + const consumableBuffer = new JavaScriptUtils.Uint8ArrayConsumableBuffer(array); + consumableBuffer.offset = 5; + + // Assert: + expect(() => consumableBuffer.getBytes(1)).to.throw(); + }); + + it('getBytes method returns requested bytes, and increases offset', () => { + // 
Arrange: + const array = [34, 2, 77, 91, 12]; + const consumableBuffer = new JavaScriptUtils.Uint8ArrayConsumableBuffer(new Uint8Array(array)); + + // Assert: + expect(Array.from(consumableBuffer.getBytes(2))).to.deep.equal([34, 2]); + expect(consumableBuffer.offset).to.equal(2); + + expect(Array.from(consumableBuffer.getBytes(3))).to.deep.equal([77, 91, 12]); + expect(consumableBuffer.offset).to.equal(5); + }); +}); + +describe('uintToBuffer function', () => { + it('converts an integer to a 1 byte buffer', () => { + // Arrange: + const int8 = 233; + + // Act: + const buffer = JavaScriptUtils.uintToBuffer(int8, 1); + + // Assert: + expect(buffer.byteLength).to.equal(1); + expect(Array.from(buffer)).to.deep.equal([233]); + }); + + it('converts an integer to a 2 byte buffer', () => { + // Arrange: + const int16 = 54346; + + // Act: + const buffer = JavaScriptUtils.uintToBuffer(int16, 2); + + // Assert: + expect(buffer.byteLength).to.equal(2); + expect(Array.from(buffer)).to.deep.equal([74, 212]); + }); + + it('converts an integer to a 4 byte buffer', () => { + // Arrange: + const int32 = 765436; + + // Act: + const buffer = JavaScriptUtils.uintToBuffer(int32, 4); + + // Assert: + expect(buffer.byteLength).to.equal(4); + expect(Array.from(buffer)).to.deep.equal([252, 173, 11, 0]); + }); + + it('throws for unexpected sizes', () => { + // Assert: + [0, 3, 8, 16].forEach(size => expect(() => JavaScriptUtils.uintToBuffer(765436, size)).to.throw('Unexpected bufferSize')); + }); +}); diff --git a/catbuffer-generators/generators/javascript/yarn.lock b/catbuffer-generators/generators/javascript/yarn.lock new file mode 100644 index 00000000..532b6093 --- /dev/null +++ b/catbuffer-generators/generators/javascript/yarn.lock @@ -0,0 +1,1786 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@babel/code-frame@^7.0.0": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" + integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== + dependencies: + "@babel/highlight" "^7.0.0" + +"@babel/highlight@^7.0.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" + integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== + dependencies: + chalk "^2.0.0" + esutils "^2.0.2" + js-tokens "^4.0.0" + +"@babel/runtime@^7.4.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.5.5.tgz#74fba56d35efbeca444091c7850ccd494fd2f132" + integrity sha512-28QvEGyQyNkB0/m2B4FU7IEZGK2NUrcMtT6BZEFALTguLk+AUT6ofsHtPk5QyjAdUkpMJ+/Em+quwz4HOt30AQ== + dependencies: + regenerator-runtime "^0.13.2" + +acorn-jsx@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.0.1.tgz#32a064fd925429216a09b141102bfdd185fae40e" + integrity sha512-HJ7CfNHrfJLlNTzIEUTj43LNWGkqpRLxm3YjAlcD0ACydk9XynzYsCBHxut+iqt+1aBXkx9UP/w/ZqMr13XIzg== + +acorn@^6.0.7: + version "6.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.1.tgz#531e58ba3f51b9dacb9a6646ca4debf5b14ca474" + integrity sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA== + +ajv@^6.10.0, ajv@^6.10.2: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-colors@3.2.3: + version "3.2.3" + resolved 
"https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813" + integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw== + +ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= + +ansi-regex@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" + integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== + +ansi-styles@^3.2.0, ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +aria-query@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-3.0.0.tgz#65b3fcc1ca1155a8c9ae64d6eee297f15d5133cc" + integrity sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w= + 
dependencies: + ast-types-flow "0.0.7" + commander "^2.11.0" + +array-includes@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.0.3.tgz#184b48f62d92d7452bb31b323165c7f8bd02266d" + integrity sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0= + dependencies: + define-properties "^1.1.2" + es-abstract "^1.7.0" + +assertion-error@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== + +ast-types-flow@0.0.7, ast-types-flow@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= + +astral-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" + integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== + +axobject-query@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.0.2.tgz#ea187abe5b9002b377f925d8bf7d1c561adf38f9" + integrity sha512-MCeek8ZH7hKyO1rWUbKNQBbl4l2eY0ntk7OGi+q0RlafrCnfPxC06WZA+uebCfmYp4mNU9jRBP1AhGyf8+W3ww== + dependencies: + ast-types-flow "0.0.7" + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + 
concat-map "0.0.1" + +browser-stdout@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" + integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^5.0.0: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +chai@^4.1.2: + version "4.2.0" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.2.0.tgz#760aa72cf20e3795e84b12877ce0e83737aa29e5" + integrity sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw== + dependencies: + assertion-error "^1.1.0" + check-error "^1.0.2" + deep-eql "^3.0.1" + get-func-name "^2.0.0" + pathval "^1.1.0" + type-detect "^4.0.5" + +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chardet@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== + +check-error@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" + integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII= + +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= + dependencies: + restore-cursor "^2.0.0" + +cli-width@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" + integrity sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk= + +cliui@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" + integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== + dependencies: + string-width "^2.1.1" + strip-ansi "^4.0.0" + wrap-ansi "^2.0.0" + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +commander@^2.11.0: + version "2.20.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422" + integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ== + +concat-map@0.0.1: + version "0.0.1" + resolved 
"https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +confusing-browser-globals@^1.0.5: + version "1.0.7" + resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.7.tgz#5ae852bd541a910e7ffb2dbb864a2d21a36ad29b" + integrity sha512-cgHI1azax5ATrZ8rJ+ODDML9Fvu67PimB6aNxBrc/QwSaDaM9eTfIEUHx3bBLJJ82ioSb+/5zfsMCCEJax3ByQ== + +contains-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" + integrity sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo= + +cross-spawn@^6.0.0, cross-spawn@^6.0.5: + version "6.0.5" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + dependencies: + nice-try "^1.0.4" + path-key "^2.0.1" + semver "^5.5.0" + shebang-command "^1.2.0" + which "^1.2.9" + +damerau-levenshtein@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.5.tgz#780cf7144eb2e8dbd1c3bb83ae31100ccc31a414" + integrity sha512-CBCRqFnpu715iPmw1KrdOrzRqbdFwQTwAWyyyYS42+iAgHCuXZ+/TdMgQkUENPomxEz9z1BEzuQU2Xw0kUuAgA== + +debug@3.2.6: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + +debug@^2.6.8, debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^4.0.1: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" + integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== + dependencies: + ms "^2.1.1" + +decamelize@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + +deep-eql@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" + integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw== + dependencies: + type-detect "^4.0.0" + +deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + +define-properties@^1.1.2, define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +diff@3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" + integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== + +doctrine@1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa" + integrity sha1-N53Ocw9hZvds76TmcHoVmwLFpvo= + dependencies: + esutils "^2.0.2" + isarray "^1.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity 
sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +emoji-regex@^7.0.1, emoji-regex@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" + integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== + +end-of-stream@^1.1.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43" + integrity sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q== + dependencies: + once "^1.4.0" + +error-ex@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.11.0, es-abstract@^1.12.0, es-abstract@^1.5.1, es-abstract@^1.7.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.13.0.tgz#ac86145fdd5099d8dd49558ccba2eaf9b88e24e9" + integrity sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg== + dependencies: + es-to-primitive "^1.2.0" + function-bind "^1.1.1" + has "^1.0.3" + is-callable "^1.1.4" + is-regex "^1.0.4" + object-keys "^1.0.12" + +es-to-primitive@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.0.tgz#edf72478033456e8dda8ef09e00ad9650707f377" + integrity 
sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escape-string-regexp@1.0.5, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +eslint-config-airbnb-base@^13.2.0: + version "13.2.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-13.2.0.tgz#f6ea81459ff4dec2dda200c35f1d8f7419d57943" + integrity sha512-1mg/7eoB4AUeB0X1c/ho4vb2gYkNH8Trr/EgCT/aGmKhhG+F6vF5s8+iRBlWAzFIAphxIdp3YfEKgEl0f9Xg+w== + dependencies: + confusing-browser-globals "^1.0.5" + object.assign "^4.1.0" + object.entries "^1.1.0" + +eslint-config-airbnb@^17.1.1: + version "17.1.1" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-17.1.1.tgz#2272e0b86bb1e2b138cdf88d07a3b6f4cda3d626" + integrity sha512-xCu//8a/aWqagKljt+1/qAM62BYZeNq04HmdevG5yUGWpja0I/xhqd6GdLRch5oetEGFiJAnvtGuTEAese53Qg== + dependencies: + eslint-config-airbnb-base "^13.2.0" + object.assign "^4.1.0" + object.entries "^1.1.0" + +eslint-import-resolver-node@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz#58f15fb839b8d0576ca980413476aab2472db66a" + integrity sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q== + dependencies: + debug "^2.6.9" + resolve "^1.5.0" + +eslint-module-utils@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.4.0.tgz#8b93499e9b00eab80ccb6614e69f03678e84e09a" + integrity sha512-14tltLm38Eu3zS+mt0KvILC3q8jyIAH518MlG+HO0p+yK885Lb1UHTY/UgR91eOyGdmxAPb+OLoW4znqIT6Ndw== + dependencies: + debug "^2.6.8" + pkg-dir "^2.0.0" + 
+eslint-plugin-import@^2.18.0: + version "2.18.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.18.0.tgz#7a5ba8d32622fb35eb9c8db195c2090bd18a3678" + integrity sha512-PZpAEC4gj/6DEMMoU2Df01C5c50r7zdGIN52Yfi7CvvWaYssG7Jt5R9nFG5gmqodxNOz9vQS87xk6Izdtpdrig== + dependencies: + array-includes "^3.0.3" + contains-path "^0.1.0" + debug "^2.6.9" + doctrine "1.5.0" + eslint-import-resolver-node "^0.3.2" + eslint-module-utils "^2.4.0" + has "^1.0.3" + lodash "^4.17.11" + minimatch "^3.0.4" + read-pkg-up "^2.0.0" + resolve "^1.11.0" + +eslint-plugin-jsx-a11y@^6.2.3: + version "6.2.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz#b872a09d5de51af70a97db1eea7dc933043708aa" + integrity sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg== + dependencies: + "@babel/runtime" "^7.4.5" + aria-query "^3.0.0" + array-includes "^3.0.3" + ast-types-flow "^0.0.7" + axobject-query "^2.0.2" + damerau-levenshtein "^1.0.4" + emoji-regex "^7.0.2" + has "^1.0.3" + jsx-ast-utils "^2.2.1" + +eslint-plugin-react@^7.14.2: + version "7.14.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.14.2.tgz#94c193cc77a899ac0ecbb2766fbef88685b7ecc1" + integrity sha512-jZdnKe3ip7FQOdjxks9XPN0pjUKZYq48OggNMd16Sk+8VXx6JOvXmlElxROCgp7tiUsTsze3jd78s/9AFJP2mA== + dependencies: + array-includes "^3.0.3" + doctrine "^2.1.0" + has "^1.0.3" + jsx-ast-utils "^2.1.0" + object.entries "^1.1.0" + object.fromentries "^2.0.0" + object.values "^1.1.0" + prop-types "^15.7.2" + resolve "^1.10.1" + +eslint-plugin-sort-requires@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-sort-requires/-/eslint-plugin-sort-requires-2.1.0.tgz#3efad948dc83798219e809f54067c40e55444861" + integrity sha1-PvrZSNyDeYIZ6An1QGfEDlVESGE= + +eslint-scope@^4.0.3: + version "4.0.3" + resolved 
"https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" + integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== + dependencies: + esrecurse "^4.1.0" + estraverse "^4.1.1" + +eslint-utils@^1.3.1: + version "1.4.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.0.tgz#e2c3c8dba768425f897cf0f9e51fe2e241485d4c" + integrity sha512-7ehnzPaP5IIEh1r1tkjuIrxqhNkzUJa9z3R92tLJdZIVdWaczEhr3EbhGtsMrVxi1KeR8qA7Off6SWc5WNQqyQ== + dependencies: + eslint-visitor-keys "^1.0.0" + +eslint-visitor-keys@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz#3f3180fb2e291017716acb4c9d6d5b5c34a6a81d" + integrity sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ== + +eslint@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-6.0.1.tgz#4a32181d72cb999d6f54151df7d337131f81cda7" + integrity sha512-DyQRaMmORQ+JsWShYsSg4OPTjY56u1nCjAmICrE8vLWqyLKxhFXOthwMj1SA8xwfrv0CofLNVnqbfyhwCkaO0w== + dependencies: + "@babel/code-frame" "^7.0.0" + ajv "^6.10.0" + chalk "^2.1.0" + cross-spawn "^6.0.5" + debug "^4.0.1" + doctrine "^3.0.0" + eslint-scope "^4.0.3" + eslint-utils "^1.3.1" + eslint-visitor-keys "^1.0.0" + espree "^6.0.0" + esquery "^1.0.1" + esutils "^2.0.2" + file-entry-cache "^5.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^3.1.0" + globals "^11.7.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + inquirer "^6.2.2" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.3.0" + lodash "^4.17.11" + minimatch "^3.0.4" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + optionator "^0.8.2" + progress "^2.0.0" + regexpp "^2.0.1" + semver "^5.5.1" + strip-ansi "^4.0.0" + strip-json-comments "^2.0.1" + table "^5.2.3" + text-table "^0.2.0" + +espree@^6.0.0: + version 
"6.0.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-6.0.0.tgz#716fc1f5a245ef5b9a7fdb1d7b0d3f02322e75f6" + integrity sha512-lJvCS6YbCn3ImT3yKkPe0+tJ+mH6ljhGNjHQH9mRtiO6gjhVAOhVXW1yjnwqGwTkK3bGbye+hb00nFNmu0l/1Q== + dependencies: + acorn "^6.0.7" + acorn-jsx "^5.0.0" + eslint-visitor-keys "^1.0.0" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.1.tgz#406c51658b1f5991a5f9b62b1dc25b00e3e5c708" + integrity sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA== + dependencies: + estraverse "^4.0.0" + +esrecurse@^4.1.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" + integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== + dependencies: + estraverse "^4.1.0" + +estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" + integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM= + +esutils@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs= + +execa@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" + integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== + dependencies: + cross-spawn "^6.0.0" + get-stream "^4.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + 
strip-eof "^1.0.0" + +external-editor@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== + dependencies: + chardet "^0.7.0" + iconv-lite "^0.4.24" + tmp "^0.0.33" + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@~2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= + dependencies: + escape-string-regexp "^1.0.5" + +file-entry-cache@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c" + integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g== + dependencies: + flat-cache "^2.0.1" + +find-up@3.0.0, find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity 
sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^2.0.0, find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +flat-cache@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" + integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA== + dependencies: + flatted "^2.0.0" + rimraf "2.6.3" + write "1.0.3" + +flat@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2" + integrity sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw== + dependencies: + is-buffer "~2.0.3" + +flatted@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08" + integrity sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + 
+get-caller-file@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" + integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== + +get-caller-file@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-func-name@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" + integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= + +get-stream@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" + integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +glob-parent@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= + dependencies: + is-glob "^3.1.0" + path-dirname "^1.0.0" + +glob@7.1.3: + version "7.1.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1" + integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.1.3: + version "7.1.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" + integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== + dependencies: + fs.realpath 
"^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.7.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +graceful-fs@^4.1.2: + version "4.2.0" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b" + integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg== + +growl@1.10.5: + version "1.10.5" + resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e" + integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" + integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= + +has@^1.0.1, has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +he@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +hosted-git-info@^2.1.4: + version "2.7.1" + resolved 
"https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047" + integrity sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w== + +iconv-lite@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +import-fresh@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.1.0.tgz#6d33fa1dcef6df930fae003446f33415af905118" + integrity sha512-PpuksHKGt8rXfWEr9m9EHIpgyyaltBy8+eF6GJM0QCAxMgxCfucMF3mjecK2QsJr0amJW7gTqh5/wht0z2UhEQ== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inquirer@^6.2.2: + version "6.5.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.5.0.tgz#2303317efc9a4ea7ec2e2df6f86569b734accf42" + integrity 
sha512-scfHejeG/lVZSpvCXpsB4j/wQNPM5JC8kiElOI0OUTwmc1RTpXr4H32/HOlQHcZiYl2z2VElwuCVDRG8vFmbnA== + dependencies: + ansi-escapes "^3.2.0" + chalk "^2.4.2" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^3.0.3" + figures "^2.0.0" + lodash "^4.17.12" + mute-stream "0.0.7" + run-async "^2.2.0" + rxjs "^6.4.0" + string-width "^2.1.0" + strip-ansi "^5.1.0" + through "^2.3.6" + +invert-kv@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" + integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + +is-buffer@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.3.tgz#4ecf3fcf749cbd1e472689e109ac66261a25e725" + integrity sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw== + +is-callable@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" + integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== + +is-date-object@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" + integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= + +is-extglob@^2.1.0, is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + +is-glob@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= + dependencies: + is-extglob "^2.1.0" + +is-glob@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-promise@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" + integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= + +is-regex@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" + integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= + dependencies: + has "^1.0.1" + +is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + +is-symbol@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.2.tgz#a055f6ae57192caee329e7a860118b497a950f38" + integrity sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw== + dependencies: + has-symbols "^1.0.0" + +isarray@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@3.13.1, js-yaml@^3.13.1: + version "3.13.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" + integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +jsx-ast-utils@^2.1.0, jsx-ast-utils@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-2.2.1.tgz#4d4973ebf8b9d2837ee91a8208cc66f3a2776cfb" + integrity sha512-v3FxCcAf20DayI+uxnCuw795+oOIkVu6EnJ1+kSzhqqTZHNkTZ7B66ZgLp4oLJ/gbA64cI0B7WRoHZMSRdyVRQ== + dependencies: + array-includes "^3.0.3" + object.assign "^4.1.0" + +lcid@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" + integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== + dependencies: + invert-kv "^2.0.0" + +levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + integrity sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg= + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14: + version "4.17.19" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" + integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== + +log-symbols@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" + integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== + dependencies: + chalk "^2.0.1" + 
+loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +map-age-cleaner@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" + integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== + dependencies: + p-defer "^1.0.0" + +mem@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" + integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== + dependencies: + map-age-cleaner "^0.1.1" + mimic-fn "^2.0.0" + p-is-promise "^2.0.0" + +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + +mimic-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimatch@3.0.4, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= + 
+mkdirp@0.5.1, mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= + dependencies: + minimist "0.0.8" + +mocha@^6.1.4: + version "6.1.4" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-6.1.4.tgz#e35fada242d5434a7e163d555c705f6875951640" + integrity sha512-PN8CIy4RXsIoxoFJzS4QNnCH4psUCPWc4/rPrst/ecSJJbLBkubMiyGCP2Kj/9YnWbotFqAoeXyXMucj7gwCFg== + dependencies: + ansi-colors "3.2.3" + browser-stdout "1.3.1" + debug "3.2.6" + diff "3.5.0" + escape-string-regexp "1.0.5" + find-up "3.0.0" + glob "7.1.3" + growl "1.10.5" + he "1.2.0" + js-yaml "3.13.1" + log-symbols "2.2.0" + minimatch "3.0.4" + mkdirp "0.5.1" + ms "2.1.1" + node-environment-flags "1.0.5" + object.assign "4.1.0" + strip-json-comments "2.0.1" + supports-color "6.0.0" + which "1.3.1" + wide-align "1.1.3" + yargs "13.2.2" + yargs-parser "13.0.0" + yargs-unparser "1.5.0" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" + integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + +ms@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + integrity sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s= + +natural-compare@^1.4.0: + version "1.4.0" + resolved 
"https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +nice-try@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== + +node-environment-flags@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.5.tgz#fa930275f5bf5dae188d6192b24b4c8bbac3d76a" + integrity sha512-VNYPRfGfmZLx0Ye20jWzHUjyTW/c+6Wq+iLhDzUI4XmhrDd9l/FozXV3F2xOaXjvp0co0+v1YSR3CMP6g+VvLQ== + dependencies: + object.getownpropertydescriptors "^2.0.3" + semver "^5.7.0" + +normalize-package-data@^2.3.2: + version "2.5.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + dependencies: + hosted-git-info "^2.1.4" + resolve "^1.10.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= + dependencies: + path-key "^2.0.0" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-keys@^1.0.11, object-keys@^1.0.12: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@4.1.0, object.assign@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" + integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== + dependencies: + define-properties "^1.1.2" + function-bind "^1.1.1" + has-symbols "^1.0.0" + object-keys "^1.0.11" + +object.entries@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.0.tgz#2024fc6d6ba246aee38bdb0ffd5cfbcf371b7519" + integrity sha512-l+H6EQ8qzGRxbkHOd5I/aHRhHDKoQXQ8g0BYt4uSweQU1/J6dZUOyWh9a2Vky35YCKjzmgxOzta2hH6kf9HuXA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.12.0" + function-bind "^1.1.1" + has "^1.0.3" + +object.fromentries@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.0.tgz#49a543d92151f8277b3ac9600f1e930b189d30ab" + integrity sha512-9iLiI6H083uiqUuvzyY6qrlmc/Gz8hLQFOcb/Ri/0xXFkSNS3ctV+CbE6yM2+AnkYfOB3dGjdzC0wrMLIhQICA== + dependencies: + define-properties "^1.1.2" + es-abstract "^1.11.0" + function-bind "^1.1.1" + has "^1.0.1" + +object.getownpropertydescriptors@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" + integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= + dependencies: + define-properties "^1.1.2" + es-abstract "^1.5.1" + +object.values@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.0.tgz#bf6810ef5da3e5325790eaaa2be213ea84624da9" + integrity 
sha512-8mf0nKLAoFX6VlNVdhGj31SVYpaNFtUnuoOXWyFEstsWRgU837AK+JYM0iAxwkSzGRbwn8cbFmgbyxj1j4VbXg== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.12.0" + function-bind "^1.1.1" + has "^1.0.3" + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + dependencies: + mimic-fn "^1.0.0" + +optionator@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + integrity sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q= + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.4" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + wordwrap "~1.0.0" + +os-locale@^3.0.0, os-locale@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" + integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== + dependencies: + execa "^1.0.0" + lcid "^2.0.0" + mem "^4.0.0" + +os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + +p-defer@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" + integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= + +p-is-promise@^2.0.0: + version 
"2.1.0" + resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" + integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.0.tgz#417c9941e6027a9abcba5092dd2904e255b5fbc2" + integrity sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ== + dependencies: + p-try "^2.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity 
sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= + dependencies: + error-ex "^1.2.0" + +path-dirname@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^2.0.0, path-key@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= + +path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + integrity sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM= + dependencies: + pify "^2.0.0" + +pathval@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0" + integrity sha1-uULm1L3mUwBe9rcTYd74cn0GReA= + +pify@^2.0.0: + version "2.3.0" + resolved 
"https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= + +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= + dependencies: + find-up "^2.1.0" + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + +progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +prop-types@^15.7.2: + version "15.7.2" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" + integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.8.1" + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +react-is@^16.8.1: + version "16.8.6" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.8.6.tgz#5bbc1e2d29141c9fbdfed456343fe2bc430a6a16" + integrity 
sha512-aUk3bHfZ2bRSVFFbbeVS4i+lNPZr3/WM5jT2J5omUVV1zzcs1nAaf3l51ctA5FFvCRbhrH0bdAsRRQddFJZPtA== + +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + integrity sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4= + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + integrity sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg= + dependencies: + load-json-file "^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + +regenerator-runtime@^0.13.2: + version "0.13.2" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.2.tgz#32e59c9a6fb9b1a4aff09b4930ca2d4477343447" + integrity sha512-S/TQAZJO+D3m9xeN1WTI8dLKBBiRgXBlTJvbWjCThHWZj9EvHK70Ff50/tYj2J/fvBY6JtFVwRuazHN2E7M9BA== + +regexpp@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" + integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +resolve-from@^4.0.0: + version 
"4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve@^1.10.0, resolve@^1.10.1, resolve@^1.11.0, resolve@^1.5.0: + version "1.11.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e" + integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw== + dependencies: + path-parse "^1.0.6" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + +rimraf@2.6.3: + version "2.6.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + dependencies: + glob "^7.1.3" + +run-async@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" + integrity sha1-A3GrSuC91yDUFm19/aZP96RFpsA= + dependencies: + is-promise "^2.1.0" + +rxjs@^6.4.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.5.2.tgz#2e35ce815cd46d84d02a209fb4e5921e051dbec7" + integrity sha512-HUb7j3kvb7p7eCUHE3FqjoDsC1xfZQ4AHFWfTKSpZ+sAhhz5X1WX0ZuUqWbzB2QhSLp3DoLUG+hMdEDKqWo2Zg== + dependencies: + tslib "^1.9.0" + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.5.1, 
semver@^5.7.0: + version "5.7.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" + integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= + +signal-exit@^3.0.0, signal-exit@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= + +slice-ansi@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" + integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ== + dependencies: + ansi-styles "^3.2.0" + astral-regex "^1.0.0" + is-fullwidth-code-point "^2.0.0" + +spdx-correct@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" + integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== + dependencies: + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" + +spdx-exceptions@^2.1.0: + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" + integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== + +spdx-expression-parse@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" + integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.5" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" + integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string-width@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" + integrity 
sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== + dependencies: + emoji-regex "^7.0.1" + is-fullwidth-code-point "^2.0.0" + strip-ansi "^5.1.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= + dependencies: + ansi-regex "^3.0.0" + +strip-ansi@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= + +strip-json-comments@2.0.1, strip-json-comments@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= + +supports-color@6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a" + integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg== + dependencies: + has-flag "^3.0.0" + +supports-color@^5.3.0: + 
version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +table@^5.2.3: + version "5.4.4" + resolved "https://registry.yarnpkg.com/table/-/table-5.4.4.tgz#6e0f88fdae3692793d1077fd172a4667afe986a6" + integrity sha512-IIfEAUx5QlODLblLrGTTLJA7Tk0iLSGBvgY8essPRVNGHAzThujww1YqHLs6h3HfTg55h++RzLHH5Xw/rfv+mg== + dependencies: + ajv "^6.10.2" + lodash "^4.17.14" + slice-ansi "^2.1.0" + string-width "^3.0.0" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +through@^2.3.6: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + +tslib@^1.9.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a" + integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ== + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + dependencies: + prelude-ls "~1.1.2" + +type-detect@^4.0.0, type-detect@^4.0.5: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity 
sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +validate-npm-package-license@^3.0.1: + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + +which@1.3.1, which@^1.2.9: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +wide-align@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" + integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== + dependencies: + string-width "^1.0.2 || 2" + +wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= + 
dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3" + integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig== + dependencies: + mkdirp "^0.5.1" + +"y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" + integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== + +yargs-parser@13.0.0: + version "13.0.0" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.0.0.tgz#3fc44f3e76a8bdb1cc3602e860108602e5ccde8b" + integrity sha512-w2LXjoL8oRdRQN+hOyppuXs+V/fVAYtpcrRxZuF7Kt/Oc+Jr2uAcVntaUTNT6w5ihoWfFDpNY8CPx1QskxZ/pw== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs-parser@^11.1.1: + version "11.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" + integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs-parser@^13.0.0: + version "13.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" + integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs-unparser@1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.5.0.tgz#f2bb2a7e83cbc87bb95c8e572828a06c9add6e0d" + integrity 
sha512-HK25qidFTCVuj/D1VfNiEndpLIeJN78aqgR23nL3y4N0U/91cOAzqfHlF8n2BvoNDcZmJKin3ddNSvOxSr8flw== + dependencies: + flat "^4.1.0" + lodash "^4.17.11" + yargs "^12.0.5" + +yargs@13.2.2: + version "13.2.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.2.tgz#0c101f580ae95cea7f39d927e7770e3fdc97f993" + integrity sha512-WyEoxgyTD3w5XRpAQNYUB9ycVH/PQrToaTXdYXRdOXvEy1l19br+VJsc0vcO8PTGg5ro/l/GY7F/JMEBmI0BxA== + dependencies: + cliui "^4.0.0" + find-up "^3.0.0" + get-caller-file "^2.0.1" + os-locale "^3.1.0" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^3.0.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^13.0.0" + +yargs@^12.0.5: + version "12.0.5" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" + integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== + dependencies: + cliui "^4.0.0" + decamelize "^1.2.0" + find-up "^3.0.0" + get-caller-file "^1.0.1" + os-locale "^3.0.0" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^2.0.0" + which-module "^2.0.0" + y18n "^3.2.1 || ^4.0.0" + yargs-parser "^11.1.1" diff --git a/catbuffer-generators/generators/python/.pypirc b/catbuffer-generators/generators/python/.pypirc new file mode 100644 index 00000000..6457e08e --- /dev/null +++ b/catbuffer-generators/generators/python/.pypirc @@ -0,0 +1,11 @@ +[distutils] +index-servers= + pypi + testpypi + +[pypi] +username = __token__ + +[testpypi] +repository: https://test.pypi.org/legacy/ +username = __token__ \ No newline at end of file diff --git a/catbuffer-generators/generators/python/PythonFileGenerator.py b/catbuffer-generators/generators/python/PythonFileGenerator.py new file mode 100644 index 00000000..3b2aa7f5 --- /dev/null +++ b/catbuffer-generators/generators/python/PythonFileGenerator.py @@ -0,0 +1,44 @@ +import os +from 
generators.common.FileGenerator import FileGenerator +from .PythonHelper import PythonHelper + + +class PythonFileGenerator(FileGenerator): + """Python file generator""" + + def init_code(self): + code = ['#!/usr/bin/python'] + copyright_file = self.options['copyright'] + code += self.get_copyright(copyright_file) + code += ['# pylint: disable=W0622,W0612,C0301,R0904', ''] + return code + + @staticmethod + def get_copyright(copyright_file): + code = [] + if os.path.isfile(copyright_file): + with open(copyright_file) as header: + for line in header: + line = line.strip() + if line.startswith('/**') or line.startswith('**/'): + code += ['"""'] + elif line.startswith('***'): + if len(line) > 3: + code += [line.replace('***', ' ')] + else: + code += [line.replace('***', '')] + else: + code += [line] + return code + + def get_template_path(self): + return '../python/templates/' + + def get_static_templates_file_names(self): + return ['GeneratorUtils', 'EmbeddedTransactionBuilderFactory', 'TransactionBuilderFactory'] + + def get_main_file_extension(self): + return '.py' + + def create_helper(self): + return PythonHelper() diff --git a/catbuffer-generators/generators/python/PythonHelper.py b/catbuffer-generators/generators/python/PythonHelper.py new file mode 100644 index 00000000..672594d8 --- /dev/null +++ b/catbuffer-generators/generators/python/PythonHelper.py @@ -0,0 +1,55 @@ +import re + +from generators.common.Helper import Helper, AttributeKind + + +class PythonHelper(Helper): + + @staticmethod + def add_required_import(required_import: set, import_type, class_name, base_class_name): + for typename in re.split('[\\[\\]]', import_type): + if typename: + if typename in ['List']: + required_import.add('from typing import ' + typename) + elif 'TransactionHeaderBuilder' in typename: + if typename == base_class_name: + required_import.add('from .' 
+ typename + ' import ' + typename) + elif typename != class_name and str(typename)[0].isupper(): + required_import.add('from .' + typename + ' import ' + typename) + if class_name == 'AggregateTransactionBodyBuilder': + required_import.add('from. EmbeddedTransactionBuilderFactory import EmbeddedTransactionBuilderFactory') + return required_import + + @staticmethod + def get_all_constructor_params(attributes): + return [a for a in attributes if not a.kind == AttributeKind.SIZE_FIELD] + + @staticmethod + def get_body_class_name(name): + body_name = name if not name.startswith('Embedded') else name[8:] + if name.startswith('Aggregate') and name.endswith('Transaction'): + body_name = 'AggregateTransaction' + return '{0}Body'.format(body_name) + + def get_builtin_type(self, size): + return 'int' + + @staticmethod + def get_condition_operation_text(op): + if op == 'has': + return '{1} in {0}' + return '{0} == {1}' + + def get_generated_type(self, schema, attribute, attribute_kind): + typename = attribute['type'] + if attribute_kind in (AttributeKind.SIMPLE, AttributeKind.SIZE_FIELD): + return self.get_builtin_type(self.get_attribute_size(schema, attribute)) + if attribute_kind == AttributeKind.BUFFER: + return 'bytes' + if not self.is_byte_type(typename): + typename = self.get_generated_class_name(typename, attribute, schema) + if self.is_any_array_kind(attribute_kind): + return 'List[{0}]'.format(typename) + if attribute_kind == AttributeKind.FLAGS: + return 'List[{0}]'.format(typename) + return typename diff --git a/catbuffer-generators/generators/python/README.md b/catbuffer-generators/generators/python/README.md new file mode 100644 index 00000000..651ac952 --- /dev/null +++ b/catbuffer-generators/generators/python/README.md @@ -0,0 +1,37 @@ +# catbuffer + +This is the Python version of the catbuffer library. 
It is generated using [catbuffer-generators](https://github.com/nemtech/catbuffer-generators) from the [catbuffer](https://github.com/nemtech/catbuffer) specification. + +The generated code is in Python version 3.7. + +This library helps serialize and deserialize NEM's Catapult entities in Python applications. + +The library's main client may be a community-driven NEM Python SDK (nem2-sdk-python) but it can also be used alone. + +## Installation & Usage +### pip install + +The python catbuffer package is hosted on [PyPI](https://pypi.org/project/catbuffer). + +To install the latest release: + +```sh +pip install catbuffer +``` +(you may need to run `pip` with root permission: `sudo pip install catbuffer`) + +To install a specific version or a snapshot: + +```sh +pip install catbuffer=={version} +``` + +Example: + +```sh +pip3 install catbuffer==0.0.2.20200329.111953a1 +``` + +## Python generator developer notes + +As catbuffer schema uses upper and lower Camel Case naming convention, the generated code also uses this convention for easier cross-referencing between the code and the schemas. You may want to disable PEP 8 naming convention violation inspection in your IDE. 
diff --git a/catbuffer-generators/generators/python/setup.py b/catbuffer-generators/generators/python/setup.py new file mode 100644 index 00000000..adb738a4 --- /dev/null +++ b/catbuffer-generators/generators/python/setup.py @@ -0,0 +1,29 @@ +from setuptools import setup, find_packages # noqa: H301 + +with open('README.md', 'r') as readme_file: + README = readme_file.read() + +NAME = '#artifactName' +VERSION = '#artifactVersion' + +REQUIRES = [] + +setup( + name=NAME, + version=VERSION, + description='Symbol Catbuffer Builders', + author='nemtech', + author_email='ravi@nem.foundation', + url='https://github.com/nemtech/catbuffer-generators', + keywords=['catbuffer-generators', 'catbuffer', 'builders', 'Symbol Catbuffer Builders'], + install_requires=REQUIRES, + package_dir={'': 'src'}, + packages=find_packages('src'), + include_package_data=True, + license='Apache 2.0', + long_description=README, + long_description_content_type='text/markdown', + classifiers=[ + 'Programming Language :: Python :: 3.7', + ] +) diff --git a/catbuffer-generators/generators/python/templates/Class.mako b/catbuffer-generators/generators/python/templates/Class.mako new file mode 100644 index 00000000..7fd02261 --- /dev/null +++ b/catbuffer-generators/generators/python/templates/Class.mako @@ -0,0 +1,311 @@ +## NOTE: do *not* touch `buffered` in render definitions, it will completely break output +<% + python_lib_import_statements = [] + catbuffer_lib_import_statements = [] + for a in sorted(generator.required_import): + if str(a).startswith('from .'): + catbuffer_lib_import_statements.append(a) + else: + python_lib_import_statements.append(a) +%>\ +from __future__ import annotations +% for a in python_lib_import_statements: +${a} +% endfor +from .GeneratorUtils import GeneratorUtils +% for a in catbuffer_lib_import_statements: +${a} +% endfor + +class ${generator.generated_class_name}${'(' + str(generator.generated_base_class_name) + ')' if generator.generated_base_class_name is not None 
else ''}: + """${helper.capitalize_first_character(generator.comments)}. + + Attributes: +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and not a.kind == helper.AttributeKind.SIZE_FIELD and not a.attribute_is_reserved and a.attribute_name != 'size']: + ${a.attribute_name}: ${helper.capitalize_first_character(a.attribute_comment)}. +% endfor + """ +<%def name="renderCondition(a, useSelf=True)" filter="trim"> + ${helper.get_condition_operation_text(a.attribute['condition_operation']).format(('self.' if useSelf else '') + a.attribute['condition'], helper.get_generated_class_name(a.condition_type_attribute['type'], a.condition_type_attribute, generator.schema) + '.' + helper.create_enum_name(a.attribute['condition_value']))} +\ +## CONSTRUCTOR: +<% + constructor_params = generator.all_constructor_params + constructor_params_CSV = ', '.join([str(a.attribute_name) + ': ' + str(a.attribute_var_type) for a in constructor_params if a.attribute_condition_value is None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + super_arguments_CSV = ', '.join([str(a.attribute_name) for a in constructor_params if a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_is_aggregate and not a.attribute_name == 'size']) +%> +## condition should be the same as condition in ctor +% if 0 == len([a for a in constructor_params if not a.attribute_is_inline and not a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_name == 'size']): + # pylint: disable=useless-super-delegation +% endif + def __init__(self, ${constructor_params_CSV}): + """Constructor. 
+ Args: +% for a in [a for a in constructor_params if a.attribute_condition_value is None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.kind == helper.AttributeKind.SIZE_FIELD and not a.attribute_name == 'size']: + ${a.attribute_name}: ${helper.capitalize_first_character(a.attribute_comment)}. +% endfor + """ + % if generator.base_class_name is not None: + super().__init__(${super_arguments_CSV}) + % endif + % for a in [a for a in constructor_params if not a.attribute_is_inline and not a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_name == 'size']: + % if a.attribute_is_aggregate: + self.${a.attribute_name} = ${a.attribute_var_type}(${', '.join([str(inline.attribute_name) for inline in constructor_params if inline.attribute_aggregate_attribute_name == a.attribute_name and not inline.attribute_is_reserved and not inline.kind == helper.AttributeKind.SIZE_FIELD and inline.attribute_condition_value is None and not inline.attribute_is_aggregate])}) + % else: + self.${a.attribute_name} = ${a.attribute_name} + % endif + % endfor + +% if 'AggregateTransactionBody' in generator.generated_class_name: + @staticmethod + def _loadEmbeddedTransactions(transactions: List[EmbeddedTransactionBuilder], payload: bytes, payloadSize: int): + remainingByteSizes = payloadSize + while remainingByteSizes > 0: + item = EmbeddedTransactionBuilderFactory.createBuilder(payload) + transactions.append(item) + itemSize = item.getSize() + GeneratorUtils.getTransactionPaddingSize(item.getSize(), 8) + remainingByteSizes -= itemSize + payload = payload[itemSize:] + return payload +% endif +## LOAD FROM BINARY: +<%def name="renderReader(a)" filter="trim" buffered="True"> + % if a.kind == helper.AttributeKind.SIMPLE: + ${a.attribute_name} = GeneratorUtils.bufferToUint(GeneratorUtils.getBytes(bytes_, ${a.attribute_size})) # kind:SIMPLE + bytes_ = bytes_[${a.attribute_size}:] + % elif a.kind == helper.AttributeKind.BUFFER: + ${a.attribute_name} = 
GeneratorUtils.getBytes(bytes_, ${a.attribute_size}) # kind:BUFFER + bytes_ = bytes_[${a.attribute_size}:] + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + ${a.attribute_name} = GeneratorUtils.bufferToUint(GeneratorUtils.getBytes(bytes_, ${a.attribute_size})) # kind:SIZE_FIELD + bytes_ = bytes_[${a.attribute_size}:] + % elif a.kind == helper.AttributeKind.ARRAY: + ${a.attribute_name}: ${a.attribute_var_type} = [] # kind:ARRAY + for _ in range(${a.attribute_size}): + item = ${a.attribute_class_name}.loadFromBinary(bytes_) + ${a.attribute_name}.append(item) + bytes_ = bytes_[item.getSize():] + % elif a.kind == helper.AttributeKind.CUSTOM and a.conditional_read_before: + ${a.attribute_name} = ${a.attribute_class_name}.loadFromBinary(${a.attribute['condition']}Condition) # kind:CUSTOM3 + % elif a.kind == helper.AttributeKind.CUSTOM and a.attribute_base_type == 'enum': + ${a.attribute_name} = ${a.attribute_class_name}.loadFromBinary(bytes_) # kind:CUSTOM2 + bytes_ = bytes_[${a.attribute_name}.getSize():] + % elif a.kind == helper.AttributeKind.CUSTOM: + ${a.attribute_name} = ${a.attribute_class_name}.loadFromBinary(bytes_) # kind:CUSTOM1 + bytes_ = bytes_[${a.attribute_name}.getSize():] + % elif a.kind == helper.AttributeKind.FILL_ARRAY: + ${a.attribute_name}: List[${a.attribute_class_name}] = [] + bytes_ = GeneratorUtils.loadFromBinary(${a.attribute_class_name}, ${a.attribute_name}, bytes_, len(bytes_)) + % elif a.kind == helper.AttributeKind.FLAGS: + ${a.attribute_name} = ${a.attribute_class_name}.bytesToFlags(bytes_, ${a.attribute_size}) # kind:FLAGS + bytes_ = bytes_[${a.attribute_size}:] + % elif a.kind == helper.AttributeKind.VAR_ARRAY: + transactions: List[${a.attribute_class_name}] = [] + bytes_ = ${generator.generated_class_name}._loadEmbeddedTransactions(transactions, bytes_, ${a.attribute_size}) + % else: + FIX ME! 
+ % endif +\ +<% + possible_constructor_params = generator.constructor_attributes[0] + if generator.base_class_name is None: + constructor_arguments_CSV = ', '.join([str(a.attribute_name) + for a in possible_constructor_params if not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + else: + constructor_arguments_CSV = ', '.join(['{0}{1}'.format('superObject.' if a.attribute_is_super else ('' if a.attribute_aggregate_attribute_name is None else a.attribute_aggregate_attribute_name + '.'), a.attribute_name) + for a in possible_constructor_params if not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) +%> + @classmethod + def loadFromBinary(cls, payload: bytes) -> ${generator.generated_class_name}: + """Creates an instance of ${generator.generated_class_name} from binary payload. + Args: + payload: Byte payload to use to serialize the object. + Returns: + Instance of ${generator.generated_class_name}. 
+ """ + bytes_ = bytes(payload) + % if generator.base_class_name is not None: + superObject = ${generator.generated_base_class_name}.loadFromBinary(bytes_) + bytes_ = bytes_[superObject.getSize():] + % endif + % for a in set([(a.attribute['condition'], a.attribute_size, a.conditional_read_before) for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and a.conditional_read_before and a.attribute_is_conditional]): + ${a[0]}Condition = bytes_[0:${a[1]}] + bytes_ = bytes_[${a[1]}:] + % endfor + + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and not a.conditional_read_before]: + %if a.attribute_is_conditional: + ${a.attribute_name} = None + if ${renderCondition(a, useSelf=False) | trim}: + ## handle py indents + % for line in map(lambda a: a.strip(), renderReader(a).splitlines()): + ${line} + % endfor + % else: + ${renderReader(a) | trim} + %endif + % endfor + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and a.conditional_read_before]: + ${a.attribute_name} = None + if ${renderCondition(a, useSelf=False) | trim}: + ## handle py indents + % for line in map(lambda a: a.strip(), renderReader(a).splitlines()): + ${line} + % endfor + % endfor + return ${generator.generated_class_name}(${constructor_arguments_CSV}) + +## GETTERS: +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_is_aggregate and not a.kind == helper.AttributeKind.SIZE_FIELD and (not a.attribute_is_reserved or not a.attribute_is_inline) and not a.attribute_name == 'size']: + def get${helper.capitalize_first_character(a.attribute_name) if a.attribute_name != 'size' else 'BytesSize'}(self) -> ${a.attribute_var_type}: + """Gets ${a.attribute_comment}. + Returns: + ${helper.capitalize_first_character(a.attribute_comment)}. 
+ """ + % if a.attribute_is_conditional and not a.attribute_is_inline: + if not ${renderCondition(a) | trim}: + raise Exception('${a.attribute['condition']} is not set to ${helper.create_enum_name(a.attribute['condition_value'])}.') + % endif + % if a.attribute_is_inline: + return self.${a.attribute_aggregate_attribute_name}.get${helper.capitalize_first_character(a.attribute_name)}() + % else: + return self.${a.attribute_name} + % endif + +% endfor +% if 'AggregateTransactionBody' in generator.generated_class_name: + @classmethod + def _serialize_aligned(cls, transaction: EmbeddedTransactionBuilder) -> bytes: + """Serializes an embeded transaction with correct padding. + Returns: + Serialized embedded transaction. + """ + bytes_ = transaction.serialize() + padding = bytes(GeneratorUtils.getTransactionPaddingSize(len(bytes_), 8)) + return GeneratorUtils.concatTypedArrays(bytes_, padding) + + @classmethod + def _getSize_aligned(cls, transaction: EmbeddedTransactionBuilder) -> int: + """Serializes an embeded transaction with correct padding. + Returns: + Serialized embedded transaction. 
+ """ + size = transaction.getSize() + paddingSize = GeneratorUtils.getTransactionPaddingSize(size, 8) + return size + paddingSize +% endif +## SIZE: +<%def name="renderSize(a)" filter="trim" buffered="True">\ + % if a.kind == helper.AttributeKind.SIMPLE: + size += ${a.attribute_size} # ${a.attribute_name} + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + size += ${a.attribute_size} # ${a.attribute_name} + % elif a.kind == helper.AttributeKind.BUFFER: + size += len(self.${a.attribute_name}) + % elif a.kind == helper.AttributeKind.VAR_ARRAY: + for _ in self.${a.attribute_name}: + size += self._getSize_aligned(_) + % elif a.kind == helper.AttributeKind.ARRAY or a.kind == helper.AttributeKind.FILL_ARRAY: + for _ in self.${a.attribute_name}: + size += _.getSize() + % elif a.kind == helper.AttributeKind.FLAGS: + size += ${a.attribute_size} # ${a.attribute_name} + % else: + size += self.${a.attribute_name}.getSize() + % endif +\ + def getSize(self) -> int: + """Gets the size of the object. + Returns: + Size in bytes. + """ + size = ${'super().getSize()' if generator.base_class_name is not None else '0'} +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline]: + % if a.attribute_is_conditional: + if ${renderCondition(a) | trim}: + ## handle py indents + % for line in map(lambda a: a.strip(), renderSize(a).splitlines()): + ${line} + % endfor + % else: + ${renderSize(a).strip()} + % endif +% endfor + return size + +% if generator.base_class_name in ['Transaction', 'EmbeddedTransaction']: + def getBody(self) -> ${generator.body_class_name}Builder: + """Gets the body builder of the object. + Returns: + Body builder. + """ + return self.${helper.decapitalize_first_character(generator.body_class_name)} + +% endif +% if generator.name in ['Transaction', 'EmbeddedTransaction']: + def getBody(self) -> None: + """Gets the body builder of the object. + Returns: + Body builder. 
+ """ + return None + +% endif +## SERIALIZE: +<%def name="renderSerialize(a)" filter="trim" buffered="True">\ + % if a.kind == helper.AttributeKind.SIMPLE and a.attribute_is_reserved: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(0, ${a.attribute_size})) + % elif a.kind == helper.AttributeKind.SIMPLE and (generator.name != 'Receipt' or a.attribute_name != 'size'): + % if a.attribute_is_reserved: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(0, ${a.attribute_size})) # kind:SIMPLE + % else: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(self.get${helper.capitalize_first_character(a.attribute_name)}(), ${a.attribute_size})) # kind:SIMPLE + % endif + % elif a.kind == helper.AttributeKind.BUFFER: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, self.${a.attribute_name}) # kind:BUFFER + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + ## note: it would be best to access parent 'kind' + % if 'AggregateTransactionBody' in generator.generated_class_name and a.attribute_name == 'payloadSize': + # calculate payload size + size_value = 0 + for _ in self.${a.parent_attribute['name']}: + size_value += self._getSize_aligned(_) + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(size_value, ${a.attribute_size})) # kind:SIZE_FIELD + % else: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(len(self.get${helper.capitalize_first_character(a.parent_attribute['name'])}()), ${a.attribute_size})) # kind:SIZE_FIELD + % endif + % elif a.kind == helper.AttributeKind.ARRAY or a.kind == helper.AttributeKind.FILL_ARRAY: + for _ in self.${a.attribute_name}: # kind:ARRAY|FILL_ARRAY + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, _.serialize()) + % elif a.kind == helper.AttributeKind.VAR_ARRAY: + for _ in self.${a.attribute_name}: # kind:VAR_ARRAY + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, self._serialize_aligned(_)) + % 
elif a.kind == helper.AttributeKind.CUSTOM: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, self.${a.attribute_name}.serialize()) # kind:CUSTOM + % elif a.kind == helper.AttributeKind.FLAGS: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(${a.attribute_class_name}.flagsToInt(self.get${helper.capitalize_first_character(a.attribute_name)}()), ${a.attribute_size})) # kind:FLAGS + % else: + # Ignored serialization: ${a.attribute_name} ${a.kind} + % endif +\ + def serialize(self) -> bytes: + """Serializes self to bytes. + Returns: + Serialized bytes. + """ + bytes_ = bytes() + % if generator.base_class_name is not None: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, super().serialize()) +% endif +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline]: + % if a.attribute_is_conditional: + if ${renderCondition(a) | trim}: + ## handle py indents + % for line in map(lambda a: a.strip(), renderSerialize(a).splitlines()): + ${line} + % endfor + % else: + ${renderSerialize(a)} + % endif +% endfor + return bytes_ \ No newline at end of file diff --git a/catbuffer-generators/generators/python/templates/EmbeddedTransactionBuilderFactory.mako b/catbuffer-generators/generators/python/templates/EmbeddedTransactionBuilderFactory.mako new file mode 100644 index 00000000..f4dc7601 --- /dev/null +++ b/catbuffer-generators/generators/python/templates/EmbeddedTransactionBuilderFactory.mako @@ -0,0 +1,42 @@ +# pylint: disable=R0911,R0912 + +# Imports for creating embedded transaction builders +from .EmbeddedTransactionBuilder import EmbeddedTransactionBuilder +% for name in sorted(generator.schema): +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) +%>\ +% if entityTypeValue > 0 and 'Aggregate' not in name and 'Block' not in name and name.startswith('Embedded'): +from .${name}Builder 
import ${name}Builder +% endif +% endfor + +class EmbeddedTransactionBuilderFactory: + """Factory in charge of creating the specific embedded transaction builder from the binary payload. + """ + + @classmethod + def createBuilder(cls, payload) -> EmbeddedTransactionBuilder: + """ + It creates the specific embedded transaction builder from the payload bytes. + Args: + payload: bytes + Returns: + the EmbeddedTransactionBuilder subclass + """ + headerBuilder = EmbeddedTransactionBuilder.loadFromBinary(payload) + entityType = headerBuilder.getType().value + entityTypeVersion = headerBuilder.getVersion() +% for name in generator.schema: +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) + entityTypeVersion = next(iter([x for x in layout if x.get('name','') == 'version']),{}).get('value',0) +%>\ +% if entityTypeValue > 0 and 'Aggregate' not in name and 'Block' not in name and name.startswith('Embedded'): + if entityType == 0x${'{:x}'.format(entityTypeValue)} and entityTypeVersion == ${entityTypeVersion}: + return ${name}Builder.loadFromBinary(payload) +% endif +% endfor + return headerBuilder \ No newline at end of file diff --git a/catbuffer-generators/generators/python/templates/Enum.mako b/catbuffer-generators/generators/python/templates/Enum.mako new file mode 100644 index 00000000..6aabc482 --- /dev/null +++ b/catbuffer-generators/generators/python/templates/Enum.mako @@ -0,0 +1,93 @@ +<% + base_class_name = 'Enum' + if generator.is_flag: + base_class_name = 'Flag' +%>\ +from __future__ import annotations +from enum import ${base_class_name} +% if generator.is_flag: +from typing import List +% endif +from .GeneratorUtils import GeneratorUtils + + +class ${generator.generated_class_name}(${base_class_name}): + """${helper.capitalize_first_character(generator.comments)} + + Attributes: +% for i, (name, (value, comment)) in 
enumerate(generator.enum_values.items()): + ${name}: ${comment}. +% endfor + """ + +% for i, (name, (value, comment)) in enumerate(generator.enum_values.items()): + ${name} = ${value} +% endfor + + @classmethod + def loadFromBinary(cls, payload: bytes) -> ${generator.generated_class_name}: + """Creates an instance of ${generator.generated_class_name} from binary payload. + Args: + payload: Byte payload to use to serialize the object. + Returns: + Instance of ${generator.generated_class_name}. + """ + value: int = GeneratorUtils.bufferToUint(GeneratorUtils.getBytes(bytes(payload), ${generator.size})) + return ${generator.generated_class_name}(value) + + @classmethod + def getSize(cls) -> int: + """Gets the size of the object. + Returns: + Size in bytes. + """ + return ${generator.size} + +% if generator.is_flag: + @classmethod + def bytesToFlags(cls, bitMaskValue: bytes, size: int) -> List[${generator.generated_class_name}]: + """Converts a bit representation to a list of ${generator.generated_class_name}. + Args: + bitMaskValue Bitmask bytes value. + Returns: + List of ${generator.generated_class_name} flags representing the int value. + """ + return cls.intToFlags(GeneratorUtils.bufferToUint(GeneratorUtils.getBytes(bitMaskValue, size))) + + @classmethod + def intToFlags(cls, bitMaskValue: int) -> List[${generator.generated_class_name}]: + """Converts a bit representation to a list of ${generator.generated_class_name}. + Args: + bitMaskValue Bitmask int value. + Returns: + List of ${generator.generated_class_name} flags representing the int value. + """ + results = [] + for flag in ${generator.generated_class_name}: + if 0 != flag.value & bitMaskValue: + results.append(flag) + return results + + @classmethod + def flagsToInt(cls, flags: List[${generator.generated_class_name}]) -> int: + """Converts a list of ${generator.generated_class_name} to a bit representation. + Args: + List of ${generator.generated_class_name} flags representing the int value. 
+ Returns: + int value of the list of flags + """ + result = 0 + for flag in ${generator.generated_class_name}: + if flag in flags: + result += flag.value + return result + +% endif + def serialize(self) -> bytes: + """Serializes self to bytes. + Returns: + Serialized bytes. + """ + bytes_ = bytes() + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(self.value, ${generator.size})) + return bytes_ \ No newline at end of file diff --git a/catbuffer-generators/generators/python/templates/GeneratorUtils.mako b/catbuffer-generators/generators/python/templates/GeneratorUtils.mako new file mode 100644 index 00000000..3ed12fbc --- /dev/null +++ b/catbuffer-generators/generators/python/templates/GeneratorUtils.mako @@ -0,0 +1,50 @@ +from __future__ import annotations +from typing import List, TypeVar + +T = TypeVar('T') + +class GeneratorUtils: + """Generator utility class""" + + @staticmethod + def bufferToUint(buffer: bytes) -> int: + return int.from_bytes(buffer, byteorder='little', signed=False) + + @staticmethod + def uintToBuffer(uint: int, buffer_size: int) -> bytes: + return uint.to_bytes(buffer_size, byteorder='little', signed=False) + + @staticmethod + def concatTypedArrays(array1, array2): + return array1 + array2 + + @staticmethod + def uint8ToInt8(number: int) -> int: + if number > 127: + return number - 256 + return number + + @staticmethod + def getTransactionPaddingSize(size: int, alignment: int) -> int: + if size % alignment == 0: + return 0 + return alignment - (size % alignment) + + @staticmethod + def getBytes(binary: bytes, size: int) -> bytes: + if size > len(binary): + raise Exception('size should not exceed {0}. 
The value of size was: {1}'.format(len(binary), size)) + return binary[0:size] + + # pylint: disable=bad-staticmethod-argument + # cls argument is not GeneratorUtils + @staticmethod + def loadFromBinary(cls: T, items: List[cls], payload: bytes, payloadSize: int): + remainingByteSizes = payloadSize + while remainingByteSizes > 0: + item = cls.loadFromBinary(payload) + items.append(item) + itemSize = item.getSize() + remainingByteSizes -= itemSize + payload = payload[itemSize:] + return payload \ No newline at end of file diff --git a/catbuffer-generators/generators/python/templates/TransactionBuilderFactory.mako b/catbuffer-generators/generators/python/templates/TransactionBuilderFactory.mako new file mode 100644 index 00000000..53dc966a --- /dev/null +++ b/catbuffer-generators/generators/python/templates/TransactionBuilderFactory.mako @@ -0,0 +1,43 @@ +# pylint: disable=R0911,R0912 + +# Imports for creating transaction builders +from .TransactionBuilder import TransactionBuilder +% for name in sorted(generator.schema): +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) +%>\ +% if entityTypeValue > 0 and 'Block' not in name and not name.startswith('Embedded'): +from .${name}Builder import ${name}Builder +% endif +% endfor + + +class TransactionBuilderFactory: + """Factory in charge of creating the specific transaction builder from the binary payload. + """ + + @classmethod + def createBuilder(cls, payload) -> TransactionBuilder: + """ + It creates the specific transaction builder from the payload bytes. 
+ Args: + payload: bytes + Returns: + the TransactionBuilder subclass + """ + headerBuilder = TransactionBuilder.loadFromBinary(payload) + entityType = headerBuilder.getType().value + entityTypeVersion = headerBuilder.getVersion() +% for name in generator.schema: +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) + entityTypeVersion = next(iter([x for x in layout if x.get('name','') == 'version']),{}).get('value',0) +%>\ + % if entityTypeValue > 0 and 'Block' not in name and not name.startswith('Embedded'): + if entityType == 0x${'{:x}'.format(entityTypeValue)} and entityTypeVersion == ${entityTypeVersion}: + return ${name}Builder.loadFromBinary(payload) + % endif +% endfor + return headerBuilder \ No newline at end of file diff --git a/catbuffer-generators/generators/python/templates/Type.mako b/catbuffer-generators/generators/python/templates/Type.mako new file mode 100644 index 00000000..bd79168c --- /dev/null +++ b/catbuffer-generators/generators/python/templates/Type.mako @@ -0,0 +1,65 @@ +from __future__ import annotations +from .GeneratorUtils import GeneratorUtils + + +class ${generator.generated_class_name}: + """${generator.comments}. + + Attributes: + ${generator.attribute_name}: ${generator.comments}. + """ + + def __init__(self, ${generator.attribute_name}: ${generator.attribute_type}): + """Constructor. + + Args: + ${generator.attribute_name}: ${generator.comments}. + """ + self.${generator.attribute_name} = ${generator.attribute_name} + + @classmethod + def loadFromBinary(cls, payload: bytes) -> ${generator.generated_class_name}: + """Creates an instance of ${generator.generated_class_name} from binary payload. + + Args: + payload: Byte payload to use to serialize the object. + Returns: + Instance of ${generator.generated_class_name}. 
+ """ + bytes_ = bytes(payload) +% if generator.attribute_kind == helper.AttributeKind.BUFFER: + ${generator.attribute_name} = GeneratorUtils.getBytes(bytes_, ${generator.size}) +% else: + ${generator.attribute_name} = GeneratorUtils.bufferToUint(GeneratorUtils.getBytes(bytes_, ${generator.size})) +% endif + return ${generator.generated_class_name}(${generator.attribute_name}) + + @classmethod + def getSize(cls) -> int: + """Gets the size of the object. + Returns: + Size in bytes. + """ + return ${generator.size} + + def get${generator.name}(self) -> ${generator.attribute_type}: + """Gets ${generator.comments}. + + Returns: + ${generator.comments}. + """ + return self.${generator.attribute_name} + + def serialize(self) -> bytes: + """Serializes self to bytes. + + Returns: + Serialized bytes. + """ + bytes_ = bytes() +% if generator.attribute_kind == helper.AttributeKind.BUFFER: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, self.${generator.attribute_name}) +% else: + bytes_ = GeneratorUtils.concatTypedArrays(bytes_, GeneratorUtils.uintToBuffer(self.get${generator.name}(), ${generator.size})) +% endif + return bytes_ \ No newline at end of file diff --git a/catbuffer-generators/generators/python/templates/setup.mako b/catbuffer-generators/generators/python/templates/setup.mako new file mode 100644 index 00000000..adb738a4 --- /dev/null +++ b/catbuffer-generators/generators/python/templates/setup.mako @@ -0,0 +1,29 @@ +from setuptools import setup, find_packages # noqa: H301 + +with open('README.md', 'r') as readme_file: + README = readme_file.read() + +NAME = '#artifactName' +VERSION = '#artifactVersion' + +REQUIRES = [] + +setup( + name=NAME, + version=VERSION, + description='Symbol Catbuffer Builders', + author='nemtech', + author_email='ravi@nem.foundation', + url='https://github.com/nemtech/catbuffer-generators', + keywords=['catbuffer-generators', 'catbuffer', 'builders', 'Symbol Catbuffer Builders'], + install_requires=REQUIRES, + package_dir={'': 
'src'}, + packages=find_packages('src'), + include_package_data=True, + license='Apache 2.0', + long_description=README, + long_description_content_type='text/markdown', + classifiers=[ + 'Programming Language :: Python :: 3.7', + ] +) diff --git a/catbuffer-generators/generators/python/test_VectorTest.py b/catbuffer-generators/generators/python/test_VectorTest.py new file mode 100644 index 00000000..ad843515 --- /dev/null +++ b/catbuffer-generators/generators/python/test_VectorTest.py @@ -0,0 +1,50 @@ +from binascii import hexlify, unhexlify +from collections import defaultdict +import importlib +from pathlib import Path +import pytest +import yaml + + +def read_test_vectors_file(filepath): + with open(filepath, 'rt') as inFd: + return yaml.load(inFd) + + +def prepare_test_cases(): + cases = [] + for filepath in Path('vector').iterdir(): + cases += read_test_vectors_file(filepath) + return cases + + +def to_hex_string(binary: bytes): + return hexlify(binary).decode('ascii').upper() + + +g_ids = defaultdict(int) + + +def generate_pretty_id(val): + # pylint: disable=global-statement + global g_ids + g_ids[val['builder']] += 1 + return '{}_{}'.format(val['builder'], g_ids[val['builder']]) + + +def prepare_payload(payload): + # some basevalue items in yaml are enclosed in qutoes + return unhexlify(payload.replace('\'', '')) + + +@pytest.mark.parametrize('item', prepare_test_cases(), ids=generate_pretty_id) +def test_serialize(item): + builderName = item['builder'] + comment = item['comment'] if 'comment' in item else '' + payload = item['payload'] + + builderModule = importlib.import_module('symbol_catbuffer.{}'.format(builderName)) + builderClass = getattr(builderModule, builderName) + builder = builderClass.loadFromBinary(prepare_payload(item['payload'])) + serialized = builder.serialize() + assert to_hex_string(serialized) == payload.upper(), comment diff --git a/catbuffer-generators/generators/typescript/.eslintrc.js 
b/catbuffer-generators/generators/typescript/.eslintrc.js new file mode 100644 index 00000000..64903c1e --- /dev/null +++ b/catbuffer-generators/generators/typescript/.eslintrc.js @@ -0,0 +1,17 @@ +module.exports = { + parser: '@typescript-eslint/parser', // Specifies the ESLint parser + extends: [ + 'plugin:@typescript-eslint/recommended', // Uses the recommended rules from the @typescript-eslint/eslint-plugin + 'prettier/@typescript-eslint', // Uses eslint-config-prettier to disable ESLint rules from @typescript-eslint/eslint-plugin that would conflict with prettier + 'plugin:prettier/recommended', // Enables eslint-plugin-prettier and eslint-config-prettier. This will display prettier errors as ESLint errors. Make sure this is always the last configuration in the extends array. + ], + parserOptions: { + ecmaVersion: 2018, // Allows for the parsing of modern ECMAScript features + sourceType: 'module', // Allows for the use of imports + }, + rules: { + '@typescript-eslint/no-non-null-assertion': 'off', + '@typescript-eslint/interface-name-prefix': 'off', + '@typescript-eslint/camelcase': 'off', + }, +}; diff --git a/catbuffer-generators/generators/typescript/.npmignore b/catbuffer-generators/generators/typescript/.npmignore new file mode 100644 index 00000000..3c3629e6 --- /dev/null +++ b/catbuffer-generators/generators/typescript/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/catbuffer-generators/generators/typescript/.npmrc b/catbuffer-generators/generators/typescript/.npmrc new file mode 100644 index 00000000..ae643592 --- /dev/null +++ b/catbuffer-generators/generators/typescript/.npmrc @@ -0,0 +1 @@ +//registry.npmjs.org/:_authToken=${NPM_TOKEN} diff --git a/catbuffer-generators/generators/typescript/.prettierrc.js b/catbuffer-generators/generators/typescript/.prettierrc.js new file mode 100644 index 00000000..212e486a --- /dev/null +++ b/catbuffer-generators/generators/typescript/.prettierrc.js @@ -0,0 +1,23 @@ +/* + * Copyright 2020 NEM + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +module.exports = { + semi: true, + trailingComma: "all", + singleQuote: true, + printWidth: 140, + tabWidth: 4 +}; diff --git a/catbuffer-generators/generators/typescript/README.md b/catbuffer-generators/generators/typescript/README.md new file mode 100644 index 00000000..30eb2b4b --- /dev/null +++ b/catbuffer-generators/generators/typescript/README.md @@ -0,0 +1,7 @@ +# catbuffer-typescript + +The catbuffer library helps serialize and deserialize NEM's Catapult entities in Typescript / Javascript applications. + +The library's main client is the NEM typescript SDK [symbol-sdk-typescript-javascript](https://github.com/nemtech/symbol-sdk-typescript-javascript) but it can be used alone. + +It has been generated using [catbuffer-generators](https://github.com/nemtech/catbuffer-generators) from the [catbuffer](https://github.com/nemtech/catbuffer) specification. 
diff --git a/catbuffer-generators/generators/typescript/TypescriptFileGenerator.py b/catbuffer-generators/generators/typescript/TypescriptFileGenerator.py new file mode 100644 index 00000000..0b00d1e5 --- /dev/null +++ b/catbuffer-generators/generators/typescript/TypescriptFileGenerator.py @@ -0,0 +1,18 @@ +from generators.common.FileGenerator import FileGenerator +from .TypescriptHelper import TypescriptHelper + + +class TypescriptFileGenerator(FileGenerator): + """Typescript file generator""" + + def get_template_path(self): + return '../typescript/templates/' + + def get_static_templates_file_names(self): + return ['GeneratorUtils', 'TransactionHelper', 'EmbeddedTransactionHelper', 'Serializer', 'index'] + + def get_main_file_extension(self): + return '.ts' + + def create_helper(self): + return TypescriptHelper() diff --git a/catbuffer-generators/generators/typescript/TypescriptHelper.py b/catbuffer-generators/generators/typescript/TypescriptHelper.py new file mode 100644 index 00000000..e4efb13f --- /dev/null +++ b/catbuffer-generators/generators/typescript/TypescriptHelper.py @@ -0,0 +1,80 @@ +import re +from generators.common.Helper import Helper, AttributeKind + + +class TypescriptHelper(Helper): + + @staticmethod + def add_required_import(required_import: set, import_type, class_name, base_class_name): + for typename in re.split('[\\[\\]]', import_type): + if typename and typename not in ['List', 'Uint8Array']: + if 'TransactionHeaderBuilder' in typename: + if typename == base_class_name: + required_import.add(typename) + if 'EmbeddedTransactionBuilder' in typename: + required_import.add('EmbeddedTransactionHelper') + required_import.add(typename) + elif typename != class_name and str(typename)[0].isupper(): + required_import.add(typename) + return required_import + + def get_body_class_name(self, name): + body_name = name if not name.startswith('Embedded') else name[8:] + if name.startswith('Aggregate') and name.endswith('Transaction'): + body_name = 
'AggregateTransaction' + return '{0}Body'.format(body_name) + + def get_builtin_type(self, size): + if size == 8: + return 'number[]' + return 'number' + + def get_read_method_name(self, size, var_name): + if isinstance(size, str) or size > 8: + return 'GeneratorUtils.getBytes(Uint8Array.from({0}), {1})'.format(var_name, size) + if size == 8: + return 'GeneratorUtils.bufferToUint64(Uint8Array.from({0}))'.format(var_name) + if size == 4: + return 'GeneratorUtils.bufferToUint32(Uint8Array.from({0}))'.format(var_name) + if size == 2: + return 'GeneratorUtils.bufferToUint16(Uint8Array.from({0}))'.format(var_name) + if size == 1: + return 'GeneratorUtils.bufferToUint8(Uint8Array.from({0}))'.format(var_name) + return 'GeneratorUtils.getBytes(Uint8Array.from({0}), {1})'.format(var_name, size) + + def get_serialize_method_name(self, size): + if isinstance(size, str) or size > 8: + return '' + if size == 8: + return 'GeneratorUtils.uint64ToBuffer' + if size == 4: + return 'GeneratorUtils.uint32ToBuffer' + if size == 2: + return 'GeneratorUtils.uint16ToBuffer' + if size == 1: + return 'GeneratorUtils.uint8ToBuffer' + return '' + + def get_load_from_binary_factory(self, attribute_class_name): + if attribute_class_name == 'EmbeddedTransactionBuilder': + return 'EmbeddedTransactionHelper' + return attribute_class_name + + def get_condition_operation_text(self, op): + if op == 'has': + return '{0}.indexOf({1}) > -1' + return '{0} === {1}' + + def get_generated_type(self, schema, attribute, attribute_kind): + typename = attribute['type'] + if attribute_kind in (AttributeKind.SIMPLE, AttributeKind.SIZE_FIELD): + return self.get_builtin_type(self.get_attribute_size(schema, attribute)) + if attribute_kind == AttributeKind.BUFFER: + return 'Uint8Array' + if not self.is_byte_type(typename): + typename = self.get_generated_class_name(typename, attribute, schema) + if self.is_any_array_kind(attribute_kind): + return '{0}[]'.format(typename) + if attribute_kind == AttributeKind.FLAGS: + 
return '{0}[]'.format(typename) + return typename diff --git a/catbuffer-generators/generators/typescript/VectorTest.test.ts b/catbuffer-generators/generators/typescript/VectorTest.test.ts new file mode 100644 index 00000000..cc4118c0 --- /dev/null +++ b/catbuffer-generators/generators/typescript/VectorTest.test.ts @@ -0,0 +1,44 @@ +import * as YAML from 'yaml'; +import * as assert from 'assert'; +import * as fs from 'fs'; +import * as builders from '../src'; + +interface BuilderTestItem { + filename: string; + builder: string; + payload: string; + comment: string; +} + +const fromHexString = (hexString: string) => + new Uint8Array((hexString.match(/.{1,2}/g) || []).map(byte => parseInt(byte, 16))); + +const toHexString = (bytes: Uint8Array) => + bytes.reduce((str, byte) => str + byte.toString(16).padStart(2, '0'), '').toUpperCase(); + +const vectorDirectory = 'test/vector'; +const files = fs.readdirSync(vectorDirectory); + +const items: BuilderTestItem[] = files.map(filename => { + const yamlText = fs.readFileSync(vectorDirectory + '/' + filename, 'utf8'); + const yamlList = YAML.parse(yamlText) + return yamlList.map((a: BuilderTestItem) => { + const builder = a.builder; + return ({ + ...a, builder: builder, filename + } as BuilderTestItem); + }); +}).reduce((acc, val) => acc.concat(val), []); + + +describe('serialize', function () { + items.forEach(item => { + const stringPayload = item.payload + ''; + it(item.filename + " - " + item.builder + " - " + (item.comment || stringPayload), function () { + const builderClass = (builders)[item.builder] + const serializer = builderClass.loadFromBinary(fromHexString(stringPayload)); + assert.equal(toHexString(serializer.serialize()), stringPayload.toUpperCase()) + assert.equal(serializer.getSize(), stringPayload.length / 2) + }); + }) +}); diff --git a/catbuffer-generators/generators/typescript/package.json b/catbuffer-generators/generators/typescript/package.json new file mode 100644 index 00000000..f670f216 --- /dev/null 
+++ b/catbuffer-generators/generators/typescript/package.json @@ -0,0 +1,42 @@ +{ + "name": "#artifactName", + "version": "#artifactVersion", + "description": "NEM Catbuffer builders", + "repository": { + "type": "git", + "url": "git+https://github.com/nemtech/catbuffer-generators.git" + }, + "scripts": { + "clean": "rm -Rf node_modules/ *.js", + "build": "tsc", + "test": "mocha -r ts-node/register ./test/**/*.test.ts", + "prettier": "prettier --write ./src", + "lint": "eslint --cache src/ --ext .ts", + "lint:fix": "eslint src/ --ext .ts --fix", + "style:fix": "npm run prettier && npm run lint:fix" + }, + "bugs": { + "url": "https://github.com/nemtech/catbuffer-generators" + }, + "main": "dist/index.js", + "types": "dist/index.d.ts", + "homepage": "https://github.com/nemtech/catbuffer-generators", + "license": "Apache-2.0", + "dependencies": {}, + "devDependencies": { + "@types/mocha": "^8.0.3", + "@types/node": "13.9.8", + "mocha": "^8.2.1", + "nyc": "^15.1.0", + "ts-node": "^9.0.0", + "typescript": "^3.9.7", + "yaml": "^1.10.0", + "@typescript-eslint/eslint-plugin": "^2.34.0", + "@typescript-eslint/parser": "^2.34.0", + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.12.0", + "eslint-plugin-prettier": "^3.1.4", + "prettier-plugin-organize-imports": "^1.1.1", + "prettier": "^2.1.2" + } +} diff --git a/catbuffer-generators/generators/typescript/templates/Class.mako b/catbuffer-generators/generators/typescript/templates/Class.mako new file mode 100644 index 00000000..ebabf92e --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/Class.mako @@ -0,0 +1,324 @@ +import { Serializer } from './Serializer'; +import { GeneratorUtils } from './GeneratorUtils'; +% for a in sorted(generator.required_import): +import { ${a} } from './${a}'; +% endfor + +/** +* ${helper.capitalize_first_character(generator.comments)} +**/ +export class ${generator.generated_class_name}${(' extends ' + str(generator.generated_base_class_name)) if 
generator.generated_base_class_name is not None else ''} implements Serializer { + +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and not a.kind == helper.AttributeKind.SIZE_FIELD and not a.attribute_is_reserved and a.attribute_name != 'size']: + /** ${helper.capitalize_first_character(a.attribute_comment)}. **/ + readonly ${a.attribute_name}${('?:' if a.attribute_is_conditional else ':')} ${a.attribute_var_type}; + +% endfor\ + +<%def name="renderCondition(a)" filter="trim"> + ${helper.get_condition_operation_text(a.attribute['condition_operation']).format(a.attribute['condition'], helper.get_generated_class_name(a.condition_type_attribute['type'], a.condition_type_attribute, generator.schema) + '.' + helper.create_enum_name(a.attribute['condition_value']))} +\ + <% + constructor_params = generator.all_constructor_params + constructor_params_CSV = ', '.join([ str(a.attribute_name) + ': ' + str(a.attribute_var_type) + (' | undefined' if a.attribute_is_conditional else '') for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + super_arguments_CSV = ', '.join([str(a.attribute_name) for a in constructor_params if a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_is_aggregate and not a.attribute_name == 'size']) + %> + /** + * Constructor. + * +% for a in [a for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.kind == helper.AttributeKind.SIZE_FIELD and not a.attribute_name == 'size']: + * @param ${a.attribute_name} ${helper.capitalize_first_character(a.attribute_comment)}. 
+% endfor + */ + public constructor(${constructor_params_CSV}) { + % if generator.base_class_name is not None: + super(${super_arguments_CSV}); + % endif + % for a in [a for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']: + %if not a.attribute_is_super and not a.attribute_is_aggregate and not a.attribute_is_inline: + % if a.attribute_is_conditional: + if (${renderCondition(a) | trim}) { + GeneratorUtils.notNull(${a.attribute_name}, "${a.attribute_name} is null or undefined"); + } + %else: + GeneratorUtils.notNull(${a.attribute_name}, "${a.attribute_name} is null or undefined"); + % endif + % endif + % endfor + % for a in [a for a in constructor_params if not a.attribute_is_inline and not a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_name == 'size']: + % if a.attribute_is_aggregate: + this.${a.attribute_name} = new ${a.attribute_var_type}(${', '.join([str(inline.attribute_name) for inline in constructor_params if inline.attribute_aggregate_attribute_name == a.attribute_name and not inline.attribute_is_reserved and not inline.kind == helper.AttributeKind.SIZE_FIELD and inline.attribute_condition_value is None and not inline.attribute_is_aggregate])}); + % else: + this.${a.attribute_name} = ${a.attribute_name}; + % endif + % endfor + } + + ## STREAM CONSTRUCTORS +<%def name="renderReader(a)" filter="trim"> +<% + this_attribute_definition = 'const ' + a.attribute_name + ': ' + a.attribute_var_type + ' = ' + if a.attribute_is_conditional: + this_attribute_definition = a.attribute_name + ' = ' + if a.attribute_is_reserved: + this_attribute_definition = '' +%>\ + + % if a.kind == helper.AttributeKind.SIMPLE: + ${this_attribute_definition}${helper.get_read_method_name(a.attribute_size, 'byteArray')}; + byteArray.splice(0, ${a.attribute_size}); + + % elif a.kind == helper.AttributeKind.BUFFER: + 
${this_attribute_definition}GeneratorUtils.getBytes(Uint8Array.from(byteArray), ${a.attribute_size}); + byteArray.splice(0, ${a.attribute_size}); + + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + ${this_attribute_definition}${helper.get_read_method_name(a.attribute_size, 'byteArray')}; + byteArray.splice(0, ${a.attribute_size}); + + % elif a.kind == helper.AttributeKind.ARRAY and a.attribute_base_type == 'enum': +## TODO. Size 2 is hardcoded here. Improve! + ${this_attribute_definition}GeneratorUtils.loadFromBinaryEnums(Uint8Array.from(byteArray), ${a.attribute_size}, 2); + byteArray.splice(0, ${a.attribute_name}.reduce((sum) => sum + 2, 0)); + + % elif a.kind == helper.AttributeKind.ARRAY: + ${this_attribute_definition}GeneratorUtils.loadFromBinary(${helper.get_load_from_binary_factory(a.attribute_class_name)}.loadFromBinary, Uint8Array.from(byteArray), ${a.attribute_size}); + byteArray.splice(0, ${a.attribute_name}.reduce((sum, c) => sum + c.getSize(), 0)); + + % elif a.kind == helper.AttributeKind.CUSTOM and a.conditional_read_before: + ${this_attribute_definition}new ${a.attribute_class_name}(${a.attribute['condition']}Condition); + + % elif a.kind == helper.AttributeKind.CUSTOM and a.attribute_base_type == 'enum': + ${this_attribute_definition}${helper.get_read_method_name(a.attribute_size, 'byteArray')}; + byteArray.splice(0, ${a.attribute_size}); + + % elif a.kind == helper.AttributeKind.CUSTOM: + ${this_attribute_definition}${helper.get_load_from_binary_factory(a.attribute_class_name)}.loadFromBinary(Uint8Array.from(byteArray)); + byteArray.splice(0, ${a.attribute_name}.getSize()); + + + % elif a.kind == helper.AttributeKind.FILL_ARRAY: + ${this_attribute_definition}GeneratorUtils.loadFromBinaryRemaining(${helper.get_load_from_binary_factory(a.attribute_class_name)}.loadFromBinary, Uint8Array.from(byteArray), byteArray.length, ${helper.resolve_alignment(a)}); + byteArray.splice(0, ${a.attribute_name}.reduce((sum, c) => sum + 
GeneratorUtils.getSizeWithPadding(c.getSize(), ${helper.resolve_alignment(a)}), 0)); + + % elif a.kind == helper.AttributeKind.FLAGS: + ${this_attribute_definition}GeneratorUtils.toFlags(${a.attribute_class_name}, ${helper.get_read_method_name(a.attribute_size, 'byteArray')}); + byteArray.splice(0, ${a.attribute_size}); + + % elif a.kind == helper.AttributeKind.VAR_ARRAY: + ${this_attribute_definition}GeneratorUtils.loadFromBinaryRemaining(${helper.get_load_from_binary_factory(a.attribute_class_name)}.loadFromBinary, Uint8Array.from(byteArray), payloadSize, ${helper.resolve_alignment(a)}); + byteArray.splice(0, ${a.attribute_name}.reduce((sum, c) => sum + GeneratorUtils.getSizeWithPadding(c.getSize(), ${helper.resolve_alignment(a)}), 0)); + + % else: + FIX ME! + % endif + +\ + + /** + * Load from binary array - Creates an object from payload. + * + * @param payload Byte payload to use to serialize the object. + */ + <% + constructor_params = generator.all_constructor_params + constructor_params_CSV = ', '.join([ str(a.attribute_name) + ': ' + str(a.attribute_var_type) + (' | undefined' if a.attribute_is_conditional else '') for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + arguments_CSV = ', '.join(['superObject.' + str(a.attribute_name) if a.attribute_is_super else (a.attribute_aggregate_attribute_name + '.' 
+ str(a.attribute_name) if a.attribute_is_inline else str(a.attribute_name)) for a in constructor_params if not a.attribute_is_reserved and not a.attribute_is_aggregate and not a.attribute_name == 'size']) + %> + public static loadFromBinary(payload: Uint8Array): ${generator.generated_class_name} { + const byteArray = Array.from(payload); + % if generator.base_class_name is not None: + const superObject = ${generator.generated_base_class_name}.loadFromBinary(payload); + byteArray.splice(0, superObject.getSize()); + % endif + % for a in set([(a.attribute['condition'], a.attribute_size, a.conditional_read_before) for a in generator.attributes if not a.attribute_is_inline and a.conditional_read_before and a.attribute_is_conditional]): + const ${a[0]}Condition = ${helper.get_read_method_name(a[1], 'byteArray')}; + byteArray.splice(0, ${a[1]}); + % endfor + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and not a.conditional_read_before]: + %if a.attribute_is_conditional: + let ${a.attribute_name} : ${a.attribute_var_type} | undefined = undefined; + if (${renderCondition(a) | trim}) { + ${renderReader(a) | trim} + } + % else: + ${renderReader(a) | trim} + %endif + % endfor + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline and a.conditional_read_before]: + let ${a.attribute_name} : ${a.attribute_var_type} | undefined = undefined; + if (${renderCondition(a) | trim}) { + ${renderReader(a) | trim} + } + % endfor + return new ${generator.generated_class_name}(${arguments_CSV}); + } + +## CONDITIONAL CONSTRUCTORS +% for possible_constructor_params in generator.constructor_attributes: + <% + constructor_params = [a for a in possible_constructor_params if a.attribute_condition_value is None and a.attribute_condition_provide and not a.attribute_is_reserved and not a.attribute_is_aggregate] + constructor_params_CSV = ', '.join([ str(a.attribute_name) + ': ' + 
str(a.attribute_var_type) for a in constructor_params]) + default_value_attributes = [a for a in possible_constructor_params if a.attribute_condition_value is not None] + create_name_suffix = ''.join([helper.capitalize_first_character(a.attribute_condition_value) for a in default_value_attributes]) + constructor_arguments_CSV = ', '.join([str(a.attribute_name) + if a.attribute_condition_value is not None or a.attribute_condition_provide else 'undefined' + for a in possible_constructor_params if not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']) + %> + /** + * Creates an instance of ${generator.generated_class_name}. + * +% for a in [a for a in constructor_params if a.attribute_condition_value == None and not a.attribute_is_aggregate and not a.attribute_is_reserved and not a.attribute_name == 'size']: + * @param ${a.attribute_name} ${helper.capitalize_first_character(a.attribute_comment)}. +% endfor + * @return Instance of ${generator.generated_class_name}. + */ + public static create${generator.generated_class_name}${create_name_suffix}(${constructor_params_CSV}): ${generator.generated_class_name} { + % for a in default_value_attributes: + const ${a.attribute_name} = ${helper.get_generated_class_name(a.attribute['type'], a.attribute, generator.schema)}.${helper.create_enum_name(a.attribute_condition_value)}; + % endfor + return new ${generator.generated_class_name}(${constructor_arguments_CSV}); + } +% endfor + +## GETTERS: +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_reserved and not a.attribute_is_aggregate and not a.kind == helper.AttributeKind.SIZE_FIELD and (not a.attribute_is_reserved or not a.attribute_is_inline) and a.attribute_name != 'size']: + /** + * Gets ${a.attribute_comment}. + * + * @return ${helper.capitalize_first_character(a.attribute_comment)}. 
+ */ + public get${helper.capitalize_first_character(a.attribute_name) if a.attribute_name != 'size' else 'StreamSize'}(): ${a.attribute_var_type} { + % if a.attribute_is_conditional and not a.attribute_is_inline: + if (!(this.${renderCondition(a) | trim} && this.${a.attribute_name})) { + throw new Error("${a.attribute['condition']} is not set to ${helper.create_enum_name(a.attribute['condition_value'])}."); + } + % endif + % if a.attribute_is_inline: + return this.${a.attribute_aggregate_attribute_name}.get${helper.capitalize_first_character(a.attribute_name)}(); + % else: + return this.${a.attribute_name}; + % endif + } + +% endfor +## SIZE: +<%def name="renderSize(a)" filter="trim">\ +<% +this_attribute_name = 'this.' + a.attribute_name + ('!' if a.attribute_is_conditional else '') +%>\ + % if a.kind == helper.AttributeKind.SIMPLE: + size += ${a.attribute_size}; // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + size += ${a.attribute_size}; // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.BUFFER: + size += ${this_attribute_name}.length; // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.ARRAY and a.attribute_base_type == 'enum': + size += ${this_attribute_name}.reduce((sum) => sum + 2, 0); + % elif a.kind == helper.AttributeKind.ARRAY or a.kind == helper.AttributeKind.VAR_ARRAY or a.kind == helper.AttributeKind.FILL_ARRAY: + size += ${this_attribute_name}.reduce((sum, c) => sum + GeneratorUtils.getSizeWithPadding(c.getSize(), ${helper.resolve_alignment(a)}), 0); // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.FLAGS: + size += ${a.attribute_size}; // ${a.attribute_name} + % elif a.kind == helper.AttributeKind.CUSTOM and a.attribute_base_type == 'enum': + size += ${a.attribute_size}; // ${a.attribute_name} + % else: + size += ${this_attribute_name}.getSize(); // ${a.attribute_name} + % endif +\ + + /** + * Gets the size of the object. + * + * @return Size in bytes. 
+ */ + public getSize(): number { + let size = ${'super.getSize()' if generator.base_class_name is not None else '0'}; +% for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline]: + % if a.attribute_is_conditional: + if (this.${renderCondition(a) | trim}) { + ${renderSize(a).strip()} + } + % else: + ${renderSize(a).strip()} + % endif +% endfor + return size; + } + +% if generator.base_class_name in ['Transaction', 'EmbeddedTransaction']: + /** + * Gets the body builder of the object. + * + * @return Body builder. + */ + public getBody(): ${generator.body_class_name}Builder { + return this.${helper.decapitalize_first_character(generator.body_class_name)}; + } +% endif + +% if generator.name in ['Transaction', 'EmbeddedTransaction']: + /** + * Gets the body builder of the object. + * + * @return Body builder. + */ + public getBody(): undefined | Serializer { + return undefined; + } +% endif + +<%def name="renderSerialize(a)" filter="trim">\ +<% +this_attribute_name = 'this.' + a.attribute_name + ('!' 
if a.attribute_is_conditional else '') +this_attribute_getter = 'this.get' + helper.capitalize_first_character(a.attribute_name) + '()' +%>\ + % if a.kind == helper.AttributeKind.SIMPLE and a.attribute_is_reserved: + const ${a.attribute_name}Bytes = ${helper.get_serialize_method_name(a.attribute_size)}(0); + % elif a.kind == helper.AttributeKind.SIMPLE and (generator.name != 'Receipt' or a.attribute_name != 'size'): + const ${a.attribute_name}Bytes = ${helper.get_serialize_method_name(a.attribute_size)}(${this_attribute_getter}); + % elif a.kind == helper.AttributeKind.BUFFER: + const ${a.attribute_name}Bytes = ${this_attribute_name}; + % elif a.kind == helper.AttributeKind.SIZE_FIELD and 'disposition' in a.parent_attribute and a.parent_attribute['disposition'] == 'var': + const ${a.attribute_name} = this.${a.parent_attribute['name']}.reduce((sum, c) => sum + GeneratorUtils.getSizeWithPadding(c.getSize(), ${helper.resolve_alignment(a)}), 0); + const ${a.attribute_name}Bytes = ${helper.get_serialize_method_name(a.attribute_size)}(${a.attribute_name}); + % elif a.kind == helper.AttributeKind.SIZE_FIELD: + const ${a.attribute_name}Bytes = ${helper.get_serialize_method_name(a.attribute_size)}(this.${a.parent_attribute['name']}.length); + % elif a.kind == helper.AttributeKind.ARRAY and a.attribute_base_type == 'enum': + const ${a.attribute_name}Bytes = GeneratorUtils.writeListEnum(${this_attribute_name}, ${helper.resolve_alignment(a)}); + % elif a.kind == helper.AttributeKind.ARRAY or a.kind == helper.AttributeKind.VAR_ARRAY or a.kind == helper.AttributeKind.FILL_ARRAY: + const ${a.attribute_name}Bytes = GeneratorUtils.writeList(${this_attribute_name}, ${helper.resolve_alignment(a)}); + % elif a.kind == helper.AttributeKind.CUSTOM and a.attribute_base_type == 'enum': + const ${a.attribute_name}Bytes = ${helper.get_serialize_method_name(a.attribute_size)}(${this_attribute_name}); + % elif a.kind == helper.AttributeKind.CUSTOM: + const ${a.attribute_name}Bytes = 
${this_attribute_name}.serialize(); + % elif a.kind == helper.AttributeKind.FLAGS: + const ${a.attribute_name}Bytes = ${helper.get_serialize_method_name(a.attribute_size)}(GeneratorUtils.fromFlags(${a.attribute_class_name}, ${this_attribute_name})); + % else: + const ${a.attribute_name}Bytes = Uint8Array.from([]); // Ignored serialization: ${a.attribute_name} ${a.kind} + % endif +\ + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. + */ + public serialize(): Uint8Array { + let newArray = Uint8Array.from([]); + % if generator.base_class_name is not None: + const superBytes = super.serialize(); + newArray = GeneratorUtils.concatTypedArrays(newArray, superBytes); +% endif + % for a in [a for a in generator.attributes if not a.attribute_is_super and not a.attribute_is_inline]: + % if a.attribute_is_conditional: + if (this.${renderCondition(a) | trim}) { + ${renderSerialize(a)} + newArray = GeneratorUtils.concatTypedArrays(newArray, ${a.attribute_name}Bytes); + } + % else: + ${renderSerialize(a)} + newArray = GeneratorUtils.concatTypedArrays(newArray, ${a.attribute_name}Bytes); + % endif + % endfor + return newArray; + } +} diff --git a/catbuffer-generators/generators/typescript/templates/EmbeddedTransactionHelper.mako b/catbuffer-generators/generators/typescript/templates/EmbeddedTransactionHelper.mako new file mode 100644 index 00000000..6e061394 --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/EmbeddedTransactionHelper.mako @@ -0,0 +1,36 @@ +import { EmbeddedTransactionBuilder } from './EmbeddedTransactionBuilder'; +% for name in generator.schema: +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) +%>\ +%if (entityTypeValue > 0 and 'Aggregate' not in name and 'Block' not in name and name.startswith('Embedded')): +import { ${name}Builder } from './${name}Builder'; +%endif +% endfor + +/** Helper class 
for embedded transaction serialization */ +export class EmbeddedTransactionHelper { + + /** Deserialize an embedded transaction builder from binary */ + public static loadFromBinary(payload: Uint8Array): EmbeddedTransactionBuilder { + + const header = EmbeddedTransactionBuilder.loadFromBinary(payload); +% for name in generator.schema: + <% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) + entityTypeVersion = next(iter([x for x in layout if x.get('name','') == 'version']),{}).get('value',0) + %>\ + %if (entityTypeValue > 0 and 'Aggregate' not in name and 'Block' not in name and name.startswith('Embedded')): + + if (header.type === ${entityTypeValue} && header.version == ${entityTypeVersion}) { + return ${name}Builder.loadFromBinary(payload); + } + %endif +% endfor + + return header; + } + +} diff --git a/catbuffer-generators/generators/typescript/templates/Enum.mako b/catbuffer-generators/generators/typescript/templates/Enum.mako new file mode 100644 index 00000000..ae4fda28 --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/Enum.mako @@ -0,0 +1,12 @@ + +/** +* ${helper.capitalize_first_character(generator.comments)} +**/ +export enum ${generator.generated_class_name} { + +% for i, (name, (value, comment)) in enumerate(generator.enum_values.items()): + /** ${comment}. */ + ${name} = ${value}, + +% endfor +} diff --git a/catbuffer-generators/generators/typescript/templates/GeneratorUtils.mako b/catbuffer-generators/generators/typescript/templates/GeneratorUtils.mako new file mode 100644 index 00000000..2256e713 --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/GeneratorUtils.mako @@ -0,0 +1,361 @@ +import { Serializer } from './Serializer'; + +/** + * Generator utility class. + */ +export class GeneratorUtils { + + /** + * Convert a UInt8Array input into an array of 2 numbers. 
+ * Numbers in the returned array are cast to UInt32. + * @param {Uint8Array} input A uint8 array. + * @returns {number[]} The uint64 representation of the input. + */ + public static bufferToUint64(input: Uint8Array): number[] { + const view = new DataView(input.slice(0, 8).reverse().buffer); + return [view.getUint32(4), view.getUint32(0)]; + } + + /** + * Read 4 bytes as a uint32 value from buffer bytes starting at given index. + * @param {Uint8Array} bytes A uint8 array. + * @param {number} index Index. + * @returns {number} 32bits integer. + */ + public static readUint32At(bytes: Uint8Array, index: number): number { + return (bytes[index] + (bytes[index + 1] << 8) + (bytes[index + 2] << 16) + (bytes[index + 3] << 24)) >>> 0; + } + + /** + * Convert uint value into buffer + * @param {number} uintValue A uint8 array. + * @param {number} bufferSize Buffer size. + * @returns {Uint8Array} + */ + public static uintToBuffer(uintValue: number, bufferSize: number): Uint8Array { + const buffer = new ArrayBuffer(bufferSize); + const dataView = new DataView(buffer); + try { + if (1 === bufferSize) { + dataView.setUint8(0, uintValue); + } else if (2 === bufferSize) { + dataView.setUint16(0, uintValue, true); + } else if (4 === bufferSize) { + dataView.setUint32(0, uintValue, true); + } else { + throw new Error('Unexpected bufferSize ' + bufferSize); + } + return new Uint8Array(buffer); + } catch (e) { + throw new Error(`Converting uint value ` + uintValue + ` into buffer with error: ` + e); + } + } + /** + * Convert uint value into buffer + * @param {number} uintValue A uint8 array. + * @returns {Uint8Array} + */ + public static uint8ToBuffer(uintValue: number): Uint8Array { + return GeneratorUtils.uintToBuffer(uintValue, 1); + } + + /** + * Convert uint value into buffer + * @param {number} uintValue A uint8 array. 
+ * @returns {Uint8Array} + */ + public static uint16ToBuffer(uintValue: number): Uint8Array { + return GeneratorUtils.uintToBuffer(uintValue, 2); + } + + /** + * Convert uint value into buffer + * @param {number} uintValue A uint8 array. + * @returns {Uint8Array} + */ + public static uint32ToBuffer(uintValue: number): Uint8Array { + return GeneratorUtils.uintToBuffer(uintValue, 4); + } + + /** + * It validates that a value is not undefined or null + * @param value the value + * @param message the message in the exception if the value is null or undefined. + */ + public static notNull(value: any, message: string): void { + if (value === undefined || value === null) { + throw new Error(message); + } + } + + /** + * Convert uint8 array buffer into number + * @param {Uint8Array} buffer A uint8 array. + * @returns {number} + */ + public static bufferToUint(buffer: Uint8Array, size: number): number { + const dataView = new DataView(buffer.buffer); + try { + if (1 === size) { + return dataView.getUint8(0); + } else if (2 === size) { + return dataView.getUint16(0, true); + } else if (4 === size) { + return dataView.getUint32(0, true); + } + throw new Error('Unexpected size ' + size); + } catch (e) { + throw new Error(`Converting buffer into number with error:` + e); + } + } + + /** + * Convert uint8 array buffer into number + * @param {Uint8Array} buffer A uint8 array. + * @returns {number} + */ + public static bufferToUint8(buffer: Uint8Array): number { + return GeneratorUtils.bufferToUint(buffer, 1); + } + + /** + * Convert uint8 array buffer into number + * @param {Uint8Array} buffer A uint8 array. + * @returns {number} + */ + public static bufferToUint16(buffer: Uint8Array): number { + return GeneratorUtils.bufferToUint(buffer, 2); + } + + /** + * Convert uint8 array buffer into number + * @param {Uint8Array} buffer A uint8 array. 
+ * @returns {number} + */ + public static bufferToUint32(buffer: Uint8Array): number { + return GeneratorUtils.bufferToUint(buffer, 4); + } + + /** + * Convert unit64 into buffer + * @param {number} uintValue Uint64 (number[]). + * @returns {Uint8Array} + */ + public static uint64ToBuffer(uintValue: number[] | number): Uint8Array { + const uint32Array = new Uint32Array(GeneratorUtils.fromUint(uintValue)); + return new Uint8Array(uint32Array.buffer); + } + + /** + * Concatenate two arrays + * @param {Uint8Array} array1 A Uint8Array. + * @param {Uint8Array} array2 A Uint8Array. + * @returns {Uint8Array} + */ + public static concatTypedArrays(array1: Uint8Array, array2: Uint8Array): Uint8Array { + const newArray = new Uint8Array(array1.length + array2.length); + newArray.set(array1); + newArray.set(array2, array1.length); + return newArray; + } + + /** Converts an unsigned byte to a signed byte with the same binary representation. + * @param {number} input An unsigned byte. + * @returns {number} A signed byte with the same binary representation as the input. + * + */ + public static uint8ToInt8 = (input: number): number => { + if (0xff < input) { + throw Error(`input '` + input + `' is out of range`); + } + return (input << 24) >> 24; + }; + + /** Get bytes by given sub array size. + * @param {Uint8Array} binary Binary bytes array. + * @param {number} size Subarray size. + * @returns {Uint8Array} + * + */ + public static getBytes(binary: Uint8Array, size: number): Uint8Array { + if (size > binary.length) { + throw new RangeError(); + } + const bytes = binary.slice(0, size); + return bytes; + } + + /** + * Gets the padding size that rounds up \a size to the next multiple of \a alignment. + * @param size Inner element size + * @param alignment Next multiple alignment + */ + public static getPaddingSize(size: number, alignment: number): number { + if (alignment === 0) { + return 0; + } + return 0 === size % alignment ? 
0 : alignment - (size % alignment); + } + + /** + * Adds the padding to the reported size according to the alignment + * @param size the size + * @param alignment the alignment + */ + public static getSizeWithPadding(size: number, alignment: number): number { + return size + GeneratorUtils.getPaddingSize(size, alignment); + } + + /** + * Tries to compact a uint64 into a simple numeric. + * @param {module:coders/uint64~uint64} uint64 A uint64 value. + * @returns {number|module:coders/uint64~uint64} + * A numeric if the uint64 is no greater than Number.MAX_SAFE_INTEGER or the original uint64 value otherwise. + */ + public static compact(uint64: number[] | number): number { + if (Array.isArray(uint64)) { + const low = uint64[0]; + const high = uint64[1]; + // don't compact if the value is >= 2^53 + if (0x00200000 <= high) { + throw new Error('Cannot compact number: ' + uint64); + } + // multiply because javascript bit operations operate on 32bit values + return high * 0x100000000 + low; + } else { + return uint64; + } + } + + /** + * Converts a numeric unsigned integer into a uint64. + * @param {number} number The unsigned integer. + * @returns {module:coders/uint64~uint64} The uint64 representation of the input. 
+ */ + public static fromUint(number: number | number[]): number[] { + if (Array.isArray(number)) { + return number; + } + return [(number & 0xffffffff) >>> 0, (number / 0x100000000) >>> 0]; + } + + /** + * It loads a static list of entities from the payload + * @param loadFromBinary the factory function + * @param payload the payload + * @param count the amount of entities + */ + public static loadFromBinary( + loadFromBinary: (payload: Uint8Array) => T, + payload: Uint8Array, + count: number | number[], + ): T[] { + const byteArray = Array.from(payload); + const values: T[] = []; + for (let i = 0; i < GeneratorUtils.compact(count); i++) { + const item = loadFromBinary(Uint8Array.from(byteArray)); + const itemSize = item.getSize(); + values.push(item); + byteArray.splice(0, itemSize); + } + return values; + } + + /** + * Loads a list of numbers from the array based on the count and number size. + * @param payload the payload + * @param count the count + * @param itemSize the number size. + */ + public static loadFromBinaryEnums(payload: Uint8Array, count: number | number[], itemSize: number): number[] { + const byteArray = Array.from(payload); + const values: number[] = []; + for (let i = 0; i < GeneratorUtils.compact(count); i++) { + values.push(GeneratorUtils.bufferToUint(payload, 2)); + byteArray.splice(0, itemSize); + } + return values; + } + + /** + * It loads a static list of entities from the payload + * @param loadFromBinary the factory function + * @param payload the payload + * @param payloadSize the amount of bytes to process. 
+ * @param alignment for the padding + */ + public static loadFromBinaryRemaining( + loadFromBinary: (payload: Uint8Array) => T, + payload: Uint8Array, + payloadSize: number, + alignment: number, + ): T[] { + const byteArray = Array.from(payload); + let remainingByteSizes: number = payloadSize; + const transactions: T[] = []; + while (remainingByteSizes > 0) { + const item = loadFromBinary(Uint8Array.from(byteArray)); + transactions.push(item); + let size = item.getSize(); + const itemSize = size + GeneratorUtils.getPaddingSize(item.getSize(), alignment); + remainingByteSizes -= itemSize; + byteArray.splice(0, itemSize); + } + return transactions; + } + + /** + * It converts a list of buffers into an Uint8Array + * @param elements the buffers to serialize + * @param alignment add padding to each element according to the alignment. + * @return the serialized buffer + */ + public static writeList(elements: Serializer[], alignment: number): Uint8Array { + return elements.reduce((newArray, item) => { + const byte = item.serialize(); + const padding = new Uint8Array(GeneratorUtils.getPaddingSize(byte.length, alignment)); + return GeneratorUtils.concatTypedArrays(newArray, GeneratorUtils.concatTypedArrays(byte, padding)); + }, Uint8Array.from([])); + } + + /** + * It serializes a list of number to a Uint8Array + * @param elements + * @param alignment + */ + public static writeListEnum(elements: number[], alignment: number): Uint8Array { + return elements.reduce((newArray, item) => { + const byte = GeneratorUtils.uint16ToBuffer(item); + const padding = new Uint8Array(GeneratorUtils.getPaddingSize(byte.length, alignment)); + return GeneratorUtils.concatTypedArrays(newArray, GeneratorUtils.concatTypedArrays(byte, padding)); + }, Uint8Array.from([])); + } + + /** + * It generates a list of flags from an aggregated value + * + * @param enumClass the enum class holding all the possible values + * @param bitMaskValue the aggregate value + * @param the flags + */ + public static 
toFlags(enumClass: any, bitMaskValue: number): number[] { + const values: number[] = Object.keys(enumClass) + .map((key) => enumClass[key]) + .filter((k) => parseInt(k) >= 0) + .map((k) => parseInt(k)); + return values.filter((value) => (value & bitMaskValue) !== 0); + } + + /** + * It converts a list of flag into an aggregated number + * @param enumClass the enum class to know the valid numbers + * @param flags the flags + */ + public static fromFlags(enumClass: any, flags: number[]): number { + const values: number[] = Object.keys(enumClass) + .map((key) => enumClass[key]) + .filter((k) => parseInt(k) >= 0) + .map((k) => parseInt(k)); + return flags.filter((f) => values.indexOf(f) > -1).reduce((a, b) => a + b, 0); + } +} diff --git a/catbuffer-generators/generators/typescript/templates/Serializer.mako b/catbuffer-generators/generators/typescript/templates/Serializer.mako new file mode 100644 index 00000000..dcb2064e --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/Serializer.mako @@ -0,0 +1,17 @@ +/** Objects of this interface knows how to serialize a catbuffer object. */ +export interface Serializer { + + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. + */ + serialize(): Uint8Array; + + /** + * Gets the size of the object. + * + * @return Size in bytes. 
+ */ + getSize(): number; +} diff --git a/catbuffer-generators/generators/typescript/templates/TransactionHelper.mako b/catbuffer-generators/generators/typescript/templates/TransactionHelper.mako new file mode 100644 index 00000000..f1554e86 --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/TransactionHelper.mako @@ -0,0 +1,35 @@ +import { TransactionBuilder } from "./TransactionBuilder"; +% for name in generator.schema: +<% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) +%>\ +%if (entityTypeValue > 0 and 'Block' not in name and not name.startswith('Embedded')): +import { ${name}Builder } from './${name}Builder'; +%endif +% endfor + +/** Helper class for embedded transaction serialization */ +export class TransactionHelper { + + /** Deserialize an transaction builder from binary */ + public static loadFromBinary(payload: Uint8Array): TransactionBuilder { + + const header = TransactionBuilder.loadFromBinary(payload); +% for name in generator.schema: + <% + layout = generator.schema[name].get("layout", [{type:""}]) + entityTypeValue = next(iter([x for x in layout if x.get('name','') == 'entityType']),{}).get('value',0) + entityTypeVersion = next(iter([x for x in layout if x.get('name','') == 'version']),{}).get('value',0) + %>\ + %if (entityTypeValue > 0 and 'Block' not in name and not name.startswith('Embedded')): + if (header.type === ${entityTypeValue} && header.version === ${entityTypeVersion}) { + return ${name}Builder.loadFromBinary(payload); + } + %endif +% endfor + + return header; + } + +} diff --git a/catbuffer-generators/generators/typescript/templates/Type.mako b/catbuffer-generators/generators/typescript/templates/Type.mako new file mode 100644 index 00000000..7f4e8740 --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/Type.mako @@ -0,0 +1,55 @@ +import { Serializer } from './Serializer'; 
+import { GeneratorUtils } from './GeneratorUtils'; + +/** ${generator.comments}. */ +export class ${generator.generated_class_name} implements Serializer { + /** ${generator.comments}. */ + readonly ${generator.attribute_name}: ${generator.attribute_type}; + + /** + * Constructor. + * + * @param ${generator.attribute_name} ${generator.comments}. + */ + constructor(${generator.attribute_name}: ${generator.attribute_type}) { + this.${generator.attribute_name} = ${generator.attribute_name}; + } + + /** + * Creates an instance of ${generator.generated_class_name} from binary payload. + * + * @param payload Byte payload to use to serialize the object. + * @return Instance of ${generator.generated_class_name}. + */ + public static loadFromBinary(payload: Uint8Array): ${generator.generated_class_name} { + const ${generator.attribute_name} = ${helper.get_read_method_name(generator.size, 'payload')}; + return new ${generator.generated_class_name}(${generator.attribute_name}); + } + + /** + * Gets ${generator.comments}. + * + * @return ${generator.comments}. + */ + public get${generator.name}(): ${generator.attribute_type} { + return this.${generator.attribute_name}; + } + + /** + * Gets the size of the object. + * + * @return Size in bytes. + */ + public getSize(): number { + return ${generator.size}; + } + + /** + * Serializes an object to bytes. + * + * @return Serialized bytes. 
+ */ + public serialize(): Uint8Array { + return ${helper.get_serialize_method_name(generator.size)}(${'this.get' + generator.name + '()'}); + } +} diff --git a/catbuffer-generators/generators/typescript/templates/index.mako b/catbuffer-generators/generators/typescript/templates/index.mako new file mode 100644 index 00000000..acf24c78 --- /dev/null +++ b/catbuffer-generators/generators/typescript/templates/index.mako @@ -0,0 +1,12 @@ +% for type_name, class_schema in generator.schema.items(): +<% + generated_class_name = helper.get_generated_class_name(type_name, class_schema, generator.schema) +%>\ +%if helper.should_generate_class(type_name): +export * from './${generated_class_name}'; +%endif +% endfor +export * from './TransactionHelper' +export * from './EmbeddedTransactionHelper' +export * from './GeneratorUtils' +export * from './Serializer' diff --git a/catbuffer-generators/generators/typescript/tsconfig.json b/catbuffer-generators/generators/typescript/tsconfig.json new file mode 100644 index 00000000..cda05d3e --- /dev/null +++ b/catbuffer-generators/generators/typescript/tsconfig.json @@ -0,0 +1,32 @@ +{ + "compilerOptions": { + "module": "commonjs", + "noImplicitAny": false, + "suppressImplicitAnyIndexErrors": true, + "target": "es2019", + "strict": true, + "moduleResolution": "node", + "removeComments": true, + "sourceMap": true, + "noLib": false, + "declaration": true, + "lib": [ + "dom", + "es6", + "es2019", + "es2020.bigint", + "es2020.string", + "es2020.symbol.wellknown", + "dom.iterable", + "scripthost" + ], + "outDir": "dist", + "typeRoots": [ + "node_modules/@types" + ] + }, + "include": [ + "src" + ], + "exclude": [] +} diff --git a/catbuffer-generators/gradle/wrapper/gradle-wrapper.jar b/catbuffer-generators/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000..5c2d1cf0 Binary files /dev/null and b/catbuffer-generators/gradle/wrapper/gradle-wrapper.jar differ diff --git 
a/catbuffer-generators/gradle/wrapper/gradle-wrapper.properties b/catbuffer-generators/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..f04d6a20 --- /dev/null +++ b/catbuffer-generators/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.3-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/catbuffer-generators/gradlew b/catbuffer-generators/gradlew new file mode 100644 index 00000000..83f2acfd --- /dev/null +++ b/catbuffer-generators/gradlew @@ -0,0 +1,188 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. 
You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/catbuffer-generators/gradlew.bat b/catbuffer-generators/gradlew.bat new file mode 100644 index 00000000..9618d8d9 --- /dev/null +++ b/catbuffer-generators/gradlew.bat @@ -0,0 +1,100 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. 
+set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/catbuffer-generators/requirements.txt b/catbuffer-generators/requirements.txt new file mode 100644 index 00000000..bf0198e5 --- /dev/null +++ b/catbuffer-generators/requirements.txt @@ -0,0 +1,9 @@ +pyyaml==4.2b1 +pytest>=5.4.1 +pycodestyle>=2.5.0 +pylint==2.4.4 +pylint-quotes>=0.2.1 +mako>=1.1.0 +keyring==21.4.0 +twine>=3.1.1 +wheel>=0.30.0 diff --git a/catbuffer-generators/scripts/generate_all.sh b/catbuffer-generators/scripts/generate_all.sh new file mode 100644 index 00000000..201caef7 --- /dev/null +++ b/catbuffer-generators/scripts/generate_all.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +source "$(dirname $0)/../catbuffer/scripts/schema_lists.sh" +source "$(dirname $0)/../catbuffer/scripts/generate_batch.sh" + +if [ "$#" -lt 1 ]; then + echo "usage: script " + exit 1 +fi + +builder="$1" +transaction_inputs=("${transaction_inputs[@]}") +if [ "${builder}" = "cpp_builder" ]; then + # "aggregate/aggregate" tracked by issue #26 + delete=("aggregate/aggregate") + transaction_inputs=("${transaction_inputs[@]/${delete}}") +fi + +if [ "$#" -lt 2 ]; then + PYTHONPATH=".:${PYTHONPATH}" generate_batch transaction_inputs "catbuffer" ${builder} +else + nis2_root="$2" + rm -rf catbuffer/_generated/${builder} + PYTHONPATH=".:${PYTHONPATH}" generate_batch transaction_inputs "catbuffer" ${builder} + cp catbuffer/_generated/${builder}/* ${nis2_root}/sdk/src/builders/ +fi diff --git a/catbuffer-generators/scripts/generate_cpp.sh b/catbuffer-generators/scripts/generate_cpp.sh new file mode 100644 index 00000000..4a8d04c9 --- /dev/null +++ b/catbuffer-generators/scripts/generate_cpp.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +rootDir="$(dirname $0)/.." 
+ +ARTIFACT_NAME="catbuffer-cpp" +RELEASE_VERSION="$(head -n 1 ${rootDir}/version.txt)" +OPERATION="$1" +SNAPSHOT_VERSION="${RELEASE_VERSION}-SNAPSHOT" +CURRENT_VERSION="$SNAPSHOT_VERSION" +if [[ $OPERATION == "release" ]]; then + CURRENT_VERSION="$RELEASE_VERSION" +fi + +echo "Building C++ version $CURRENT_VERSION, operation $OPERATION" + +${rootDir}/scripts/generate_all.sh cpp_builder + +# TODO Fix aggregate and compile c++ diff --git a/catbuffer-generators/scripts/generate_java.sh b/catbuffer-generators/scripts/generate_java.sh new file mode 100644 index 00000000..782aeda4 --- /dev/null +++ b/catbuffer-generators/scripts/generate_java.sh @@ -0,0 +1,82 @@ +#!/bin/bash +set -e + +generatorsRootDir="$(dirname $0)/.." +topLevelRoot="$(git rev-parse --show-superproject-working-tree)" + +ARTIFACT_NAME="catbuffer-java" +RELEASE_VERSION="$(head -n 1 ${generatorsRootDir}/version.txt)" +OPERATION="$1" +SNAPSHOT_VERSION="${RELEASE_VERSION}-SNAPSHOT" +CURRENT_VERSION="$SNAPSHOT_VERSION" +if [[ $OPERATION == "release" ]]; then + CURRENT_VERSION="$RELEASE_VERSION" +fi + +echo "Building Java version $CURRENT_VERSION, operation $OPERATION, rootDir = ${generatorsRootDir}" + + +rm -rf "${generatorsRootDir}/build/java/$ARTIFACT_NAME" +mkdir -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/main/java/io/nem/symbol/catapult/builders" +PYTHONPATH=".:${topLevelRoot}/catbuffer-parser:${PYTHONPATH}" python3 "${topLevelRoot}/catbuffer-parser/main.py" \ + --schema "${topLevelRoot}/catbuffer-schemas/schemas/all.cats" \ + --include "${topLevelRoot}/catbuffer-schemas/schemas" \ + --output "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/main/java/io/nem/symbol/catapult/builders" \ + --generator java \ + --copyright catbuffer/HEADER.inc + +#python3 -m generators \ +# --input "${generatorsRootDir}/schemas/symbol.yml" \ +# --output "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/main/java/io/nem/symbol/catapult/builders" \ +# --generator java \ +# --copyright HEADER.inc + +if [[ 
$OPERATION == "release" ]]; then + ARTIFACT_VERSION="${ARTIFACT_VERSION%$SNAPSHOT_PREFIX}" +fi +echo $ARTIFACT_VERSION + +mkdir -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/java/io/nem/symbol/catapult/builders" +mkdir -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/resources" +cp -r "${generatorsRootDir}/test/vector" "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/resources" + +rm "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/resources/vector/states.yml" + +#echo find "${generatorsRootDir}/test/vector" -name '*.yml' -print0 \| xargs -0 -I FILES cp FILES "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/resources/" +#find "${generatorsRootDir}/test/vector" -name '*.yml' -print0 | xargs -0 -I FILES cp FILES "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/resources/" +cp "${generatorsRootDir}/generators/java/VectorTest.java" "${generatorsRootDir}/build/java/$ARTIFACT_NAME/src/test/java/io/nem/symbol/catapult/builders" + + +cp "${generatorsRootDir}/generators/java/build.gradle" "${generatorsRootDir}/build/java/$ARTIFACT_NAME" +cp "${generatorsRootDir}/generators/java/settings.gradle" "${generatorsRootDir}/build/java/$ARTIFACT_NAME" + +sed -i -e "s/#artifactName/$ARTIFACT_NAME/g" "${generatorsRootDir}/build/java/$ARTIFACT_NAME/settings.gradle" +sed -i -e "s/#artifactVersion/$CURRENT_VERSION/g" "${generatorsRootDir}/build/java/$ARTIFACT_NAME/build.gradle" + +# if [[ $OPERATION == "release" ]]; then +# echo "Releasing artifact $CURRENT_VERSION" +# #${generatorsRootDir}/gradlew -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish closeAndReleaseRepository +# echo gradle -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish +# #gradle -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish closeAndReleaseRepository +# elif [[ $OPERATION == "publish" ]]; then +# echo "Publishing artifact $CURRENT_VERSION" +# #${generatorsRootDir}/gradlew -p 
"${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish +# echo gradle -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish +# #gradle -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish +# else +# echo "Installing artifact $CURRENT_VERSION" +# #${generatorsRootDir}/gradlew -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test install +# echo gradle -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test install +# #gradle -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test install +# fi + +# if [[ $OPERATION == "release" ]]; then +# echo "Releasing artifact $CURRENT_VERSION" +# ${generatorsRootDir}/gradlew -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish closeAndReleaseRepository +# elif [[ $OPERATION == "publish" ]]; then +# echo "Publishing artifact $CURRENT_VERSION" +# ${generatorsRootDir}/gradlew -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test publish +# else + echo "Installing artifact $CURRENT_VERSION" + ${generatorsRootDir}/gradlew -p "${generatorsRootDir}/build/java/$ARTIFACT_NAME/" test install +# fi diff --git a/catbuffer-generators/scripts/generate_python.sh b/catbuffer-generators/scripts/generate_python.sh new file mode 100644 index 00000000..d713d8b5 --- /dev/null +++ b/catbuffer-generators/scripts/generate_python.sh @@ -0,0 +1,135 @@ +#!/bin/bash +set -e + +HOME=~/ +rootDir="$(dirname "$0")/.." +echo "${rootDir}" + +# Artifact naming +artifactPrefix="symbol" +artifactName="catbuffer" + +# PEP 440 compliant semantic version is used for uploading to PyPI +# Examples +# 1.2.0.dev1 # Development release +# 1.2.0a1 # Alpha Release +# 1.2.0b1 # Beta Release +# 1.2.0rc1 # Release Candidate +# 1.2.0 # Final Release +# 1.2.0.post1 # Post Release +# For pre-releases (alpha, beta, rc): +# - A UTC timestamp (YYYYMMDD.HHMMSS) is embedded for automatic publishing; no need to increment the prerelease version. +# e.g. 
0.0.3.20200522.070728a1 +# - Any leading zeros in the date and/or time portions are dropped during the package build version normalization. +# e.g. 0.0.3.20200522.70728a1 +# PyPI: https://pypi.org/project/catbuffer/ +# Test: https://test.pypi.org/project/catbuffer/ + +releaseArtifactVersion="$(head -n 1 ${rootDir}/version.txt)" # Artifact version +prereleaseSuffix="a1" # Pre-release suffix +snapshotDateTime=".$(date -u +'%Y%m%d.%H%M%S')" # Pre-release timestamp +prereleaseVersion="${releaseArtifactVersion}${snapshotDateTime}${prereleaseSuffix}" +snapshot=true +repo="pypi" +upload=false +OPERATION="$1" +artifactVersion="${prereleaseVersion}" + +if [[ $OPERATION == "publish" ]]; then + upload=true +elif [[ $OPERATION == "test" ]]; then + repo="testpypi" + REPO_URL="https://test.pypi.org/legacy/" + upload=true +elif [[ $OPERATION == "release" ]]; then + artifactVersion="${releaseArtifactVersion}" + snapshot=false + upload=true +fi + +echo "Building Python version $artifactVersion, operation $OPERATION" + +echo "artifactName=${artifactName}" +echo "artifactVersion=${artifactVersion}" +echo "snapshot=${snapshot}" +echo "repo=${repo}" + +GIT_USER_ID="$(cut -d'/' -f1 <<<"$TRAVIS_REPO_SLUG")" +GIT_REPO_ID="$(cut -d'/' -f2 <<<"$TRAVIS_REPO_SLUG")" +echo "Travis Repo Slug: $TRAVIS_REPO_SLUG" +echo "Git User ID: $GIT_USER_ID" +echo "Git Repo ID: $GIT_REPO_ID" +if [[ $upload == true ]] && [[ $repo == "pypi" ]] && [[ -n $TRAVIS_REPO_SLUG ]] && [[ $GIT_USER_ID != 'nemtech' ]]; then + upload=false + echo "User is not 'nemtech': Disable upload to PyPI" +fi + +artifactProjectName="catbuffer-python" +artifactBuildDir="${rootDir}/build/python/${artifactProjectName}" +artifactSrcDir="${artifactBuildDir}/src" +artifactPackageDir="${artifactSrcDir}/${artifactPrefix}_${artifactName}" +artifactTestDir="${artifactBuildDir}/test" + +rm -rf "${rootDir}/catbuffer/_generated/python" +rm -rf "${artifactBuildDir}" + +mkdir -p "${artifactPackageDir}" +PYTHONPATH=".:${PYTHONPATH}" python3 
"catbuffer/main.py" \ + --schema catbuffer/schemas/all.cats \ + --include catbuffer/schemas \ + --output "${artifactPackageDir}" \ + --generator python \ + --copyright catbuffer/HEADER.inc + +touch "${artifactPackageDir}/__init__.py" +cp "$rootDir/LICENSE" "${artifactBuildDir}" +cp "$rootDir/.pylintrc" "${artifactBuildDir}" +cp "$rootDir/generators/python/README.md" "${artifactBuildDir}" +cp "$rootDir/generators/python/setup.py" "${artifactBuildDir}" +cp "$rootDir/generators/python/"test_*.py "${artifactBuildDir}" +cp -r "$rootDir/test/vector" "${artifactBuildDir}" +cp "$rootDir/generators/python/.pypirc" "${HOME}" +sed -i -e "s/#artifactName/$artifactName/g" "${artifactBuildDir}/setup.py" +sed -i -e "s/#artifactVersion/$artifactVersion/g" "${artifactBuildDir}/setup.py" + +mkdir -p "${artifactTestDir}" +PYTEST_CACHE="$rootDir/test/python/.pytest_cache/" +if [ -d "$PYTEST_CACHE" ]; then rm -Rf "$PYTEST_CACHE"; fi + +# Build +cd "${artifactBuildDir}" +echo "Building..." +PYTHONPATH=".:${PYTHONPATH}" python3 setup.py sdist bdist_wheel build + +# Test +echo "Testing..." +PYTHONPATH="./src:${PYTHONPATH}" pytest -v --color=yes --exitfirst --showlocals --durations=5 +# Linter +echo "Linting..." 
+PYTHONPATH="./src:${PYTHONPATH}" pylint --rcfile .pylintrc --load-plugins pylint_quotes symbol_catbuffer +# Deploy +if [[ $upload == true ]]; then + # Log intention + if [[ $OPERATION == "release" ]]; then + echo "Releasing python artifact[$artifactName $artifactVersion] to $repo" + else + echo "Publishing python artifact[$artifactName $artifactVersion] to $repo" + fi + # Do upload + if [[ $repo == "pypi" ]]; then + if [[ -n ${PYPI_USER} ]] && [[ -n ${PYPI_PASS} ]]; then + echo "PYPI_USER and PYPI_PASS are already set: Uploading to PyPI" + PYTHONPATH=".:${PYTHONPATH}" python3 -m twine upload -u "$PYPI_USER" -p "$PYPI_PASS" dist/* + else + echo "PYPI_USER and/or PYPI_PASS not set: Cancelled upload to PyPI" + fi + else + if [[ -n ${TEST_PYPI_USER} ]] && [[ -n ${TEST_PYPI_PASS} ]]; then + echo "TEST_PYPI_USER and TEST_PYPI_PASS are already set: Uploading to PyPI" + PYTHONPATH=".:${PYTHONPATH}" python3 -m twine upload --repository-url $REPO_URL -u "$TEST_PYPI_USER" -p "$TEST_PYPI_PASS" dist/* + else + echo "TEST_PYPI_USER and/or TEST_PYPI_PASS not set: Initiated manual upload" + PYTHONPATH=".:${PYTHONPATH}" python3 -m twine upload --repository $repo dist/* + fi + fi +fi diff --git a/catbuffer-generators/scripts/generate_typescript.sh b/catbuffer-generators/scripts/generate_typescript.sh new file mode 100644 index 00000000..b7321e27 --- /dev/null +++ b/catbuffer-generators/scripts/generate_typescript.sh @@ -0,0 +1,55 @@ +#!/bin/bash +set -e + +rootDir="$(dirname $0)/.." 
+ +RELEASE_VERSION="$(head -n 1 ${rootDir}/version.txt)" +OPERATION="$1" +ARTIFACT_NAME="catbuffer-typescript" +ALPHA_VERSION="${RELEASE_VERSION}-alpha-$(date +%Y%m%d%H%M)" +CURRENT_VERSION="$ALPHA_VERSION" +if [[ $OPERATION == "release" ]]; then + CURRENT_VERSION="$RELEASE_VERSION" +fi + +echo "Building Typescript version $CURRENT_VERSION, operation $OPERATION" + +#rm -rf "$rootDir/build/typescript/$ARTIFACT_NAME" + +mkdir -p "$rootDir/build/typescript/$ARTIFACT_NAME/src/" +PYTHONPATH=".:${PYTHONPATH}" python3 "catbuffer/main.py" \ + --schema catbuffer/schemas/all.cats \ + --include catbuffer/schemas \ + --output "$rootDir/build/typescript/$ARTIFACT_NAME/src" \ + --generator typescript \ + --copyright catbuffer/HEADER.inc + +mkdir -p "$rootDir/build/typescript/$ARTIFACT_NAME/test/vector" +cp -r "$rootDir/test/vector" "$rootDir/build/typescript/$ARTIFACT_NAME/test" +cp "$rootDir/generators/typescript/VectorTest.test.ts" "$rootDir/build/typescript/$ARTIFACT_NAME/test" + +cp "$rootDir/generators/typescript/.npmignore" "$rootDir/build/typescript/$ARTIFACT_NAME" +cp "$rootDir/generators/typescript/package.json" "$rootDir/build/typescript/$ARTIFACT_NAME" +cp "$rootDir/generators/typescript/README.md" "$rootDir/build/typescript/$ARTIFACT_NAME" +cp "$rootDir/generators/typescript/tsconfig.json" "$rootDir/build/typescript/$ARTIFACT_NAME" +cp "$rootDir/generators/typescript/.eslintrc.js" "$rootDir/build/typescript/$ARTIFACT_NAME" +cp "$rootDir/generators/typescript/.prettierrc.js" "$rootDir/build/typescript/$ARTIFACT_NAME" +sed -i -e "s/#artifactName/$ARTIFACT_NAME/g" "$rootDir/build/typescript/$ARTIFACT_NAME/package.json" +sed -i -e "s/#artifactVersion/$CURRENT_VERSION/g" "$rootDir/build/typescript/$ARTIFACT_NAME/package.json" + +npm install --prefix "$rootDir/build/typescript/$ARTIFACT_NAME/" +npm run style:fix --prefix "$rootDir/build/typescript/$ARTIFACT_NAME/" +npm run test --prefix "$rootDir/build/typescript/$ARTIFACT_NAME/" +npm run build --prefix 
"$rootDir/build/typescript/$ARTIFACT_NAME/" + +if [[ $OPERATION == "release" ]]; then + echo "Releasing artifact $CURRENT_VERSION" + cp "$rootDir/generators/typescript/.npmignore" "$rootDir/build/typescript/$ARTIFACT_NAME/" + cp "$rootDir/generators/typescript/.npmrc" "$rootDir/build/typescript/$ARTIFACT_NAME/" + cd "$rootDir/build/typescript/$ARTIFACT_NAME/" && npm publish +elif [[ $OPERATION == "publish" ]]; then + echo "Publishing artifact $CURRENT_VERSION" + cp "$rootDir/generators/typescript/.npmignore" "$rootDir/build/typescript/$ARTIFACT_NAME/" + cp "$rootDir/generators/typescript/.npmrc" "$rootDir/build/typescript/$ARTIFACT_NAME/" + cd "$rootDir/build/typescript/$ARTIFACT_NAME/" && npm publish --tag alpha +fi diff --git a/catbuffer-generators/test/vector/basic.yml b/catbuffer-generators/test/vector/basic.yml new file mode 100644 index 00000000..63c9f1cb --- /dev/null +++ b/catbuffer-generators/test/vector/basic.yml @@ -0,0 +1,13 @@ +- builder: TimestampDto + payload: '0100000000000000' + comment: Add single quotes to avoid being used as a number with left zeros. 
+- builder: TimestampDto + payload: '1234567891234567' +- builder: KeyDto + payload: FA8EC085AE64CF30E44ADD18A3133D9B2190F9A20C08667A5EF44E5E9962E720 +- builder: Hash256Dto + payload: 4DC6F0524C486D78A6D9D775F5508C0362125420728D03DE74435EB1E3778891 +- builder: AmountDto + payload: '0A00000000000000' +- builder: UnresolvedAddressDto + payload: 90F36CA680C35D630662A0C38DC89D4978D10B511B3D241A diff --git a/catbuffer-generators/test/vector/states.yml b/catbuffer-generators/test/vector/states.yml new file mode 100644 index 00000000..6d77cf94 --- /dev/null +++ b/catbuffer-generators/test/vector/states.yml @@ -0,0 +1,25 @@ +- builder: MosaicDefinitionBuilder + payload: 000000000000000090F1B694E1801EEFE42846E9239B54C9D381FCDF2A04A4210100000007030A00000000000000 +- builder: MetadataEntryBuilder + payload: 0100900E96DC85F6B24AC9C8DB5FFC59C35880C0B722C7A416A790FD35818960C7B18B72F49A5598FA9F712A354DB38EB0760A0000000000000068E0AE3A0168EDBD020B00536F6D6520537472696E67 +- builder: MetadataEntryBuilder + payload: 0100900E96DC85F6B24AC9C8DB5FFC59C35880C0B722C7A416A790FD35818960C7B18B72F49A5598FA9F712A354DB38EB0760A000000000000000000000000000000000B00536F6D6520537472696E67 +- builder: MetadataEntryBuilder + payload: 0100900E96DC85F6B24AC9C8DB5FFC59C35880C0B722C7A416A790FD35818960C7B18B72F49A5598FA9F712A354DB38EB0760A000000000000004460BA6E125F9C1C010B00536F6D6520537472696E67 +- builder: AccountRestrictionsBuilder + payload: 01009050B9837EFAB4BBE8A4B9BB32D812F9885C00D8FC1650E101000000000000000440020000000000000054415441 +- builder: AccountRestrictionsBuilder + payload: 01009050B9837EFAB4BBE8A4B9BB32D812F9885C00D8FC1650E10200000000000000044002000000000000005441544101000000000000000000 +- builder: AccountRestrictionsBuilder + payload: 01009050B9837EFAB4BBE8A4B9BB32D812F9885C00D8FC1650E1010000000000000001400000000000000000 +- builder: AccountRestrictionsBuilder + payload: 01009050B9837EFAB4BBE8A4B9BB32D812F9885C00D8FC1650E10000000000000000 + comment: Emtpy Account Restriction +- builder: 
FinalizedBlockHeaderBuilder + payload: 90FD35818960C7B18B72F49A5598FA9F712A354DB38EB076C40300000000000011111111111111111111111111111111 +#- builder: AccountStateBuilder +# payload: 90FD35818960C7B18B72F49A5598FA9F712A354DB38EB076C4030000000000001111111111111111111111111111111111111111111111111111111111111111C403000000000000030107012222222222222222222222222222222222222222222222222222222222222222333333333333333333333333333333333333333333333333333333333333333344444444444444444444444444444444444444444444444444444444444444445555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555550A000000140000000903000000000000000000000000000001000000000000000A0000000000000064000000E80300000000000002000000000000001400000000000000C8000000D00700000000000003000000000000001E000000000000002C010000B80B0000000000000400000000000000280000000000000090010000A00F00000000000005000000000000003200000000000000F4010000881300000000000001000A000000000000001027000000000000 +#- builder: AccountStateBuilder +# payload: 90FD35818960C7B18B72F49A5598FA9F712A354DB38EB076C4030000000000001111111111111111111111111111111111111111111111111111111111111111C403000000000000030007012222222222222222222222222222222222222222222222222222222222222222333333333333333333333333333333333333333333333333333333333333333344444444444444444444444444444444444444444444444444444444444444445555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555550A0000001400000001000A000000000000001027000000000000 +#- builder: AccountStateBuilder +# payload: 
90FD35818960C7B18B72F49A5598FA9F712A354DB38EB076C4030000000000001111111111111111111111111111111111111111111111111111111111111111C403000000000000030007012222222222222222222222222222222222222222222222222222222222222222333333333333333333333333333333333333333333333333333333333333333344444444444444444444444444444444444444444444444444444444444444445555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555550A0000001400000001000A000000000000001027000000000000 diff --git a/catbuffer-generators/test/vector/transactions.yml b/catbuffer-generators/test/vector/transactions.yml new file mode 100644 index 00000000..488d8581 --- /dev/null +++ b/catbuffer-generators/test/vector/transactions.yml @@ -0,0 +1,277 @@ +- builder: MosaicMetadataTransactionBuilder + payload: B20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904442000000000000000001000000000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A00000000000000E8030000000000000A000600313233414243 +- builder: MosaicMetadataTransactionBuilder + payload: B20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904442000000000000000001000000000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A00000000000000E8030000000000000A000600313233414243 +- builder: AggregateBondedTransactionBuilder + payload: 
100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A00000000000000010000000000000097EE38BB7E04C0C915F3B69B5D6CF77E04B893A86090E417A42660A073515E9C680000000000000062000000000000004871937A9B3872130EB65765BF5E0AE326C49ABB39A3F353711AE782D95FF2CB00000000019044429083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A00000000000000E8030000000000000A000600313233414243000000000000 +- builder: MosaicAliasTransactionBuilder + payload: 910000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904E4300000000000000000100000000000000A487791451FDF1B60A0000000000000001 +- builder: MosaicAliasTransactionBuilder + payload: 910000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904E4300000000000000000100000000000000A487791451FDF1B60A0000000000000001 +- builder: AggregateBondedTransactionBuilder + payload: F00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000003856BB39656064ADA2971A0791F228069B1F2293DBF1344B687C8E743AEC6B23480000000000000041000000000000004E0184D19FD1D9427BB6888BDDC56756686829A1CAD6686D8D53339E9844CE000000000001904E43A487791451FDF1B60A000000000000000100000000000000 +- builder: AddressAliasTransactionBuilder + payload: 
A100000000000000164DC06341FE6FAC16EF51663F04113049B5CC3B648043EDE1D8BBF4BF16B7B8933F7E42A30B84A6D1EAB5CCECD8E4462923323E5816BED2134D54013B937D1A68B3FBB18729C1FDE225C57F8CE080FA828F0067E451A3FD81FA628842B0B7630000000001904E42010000000000000001000000000000004BFA5F372D55B3849049E14BEBCA93758EB36805BAE760A57239976F009A545C01 +- builder: AddressAliasTransactionBuilder + payload: A100000000000000164DC06341FE6FAC16EF51663F04113049B5CC3B648043EDE1D8BBF4BF16B7B8933F7E42A30B84A6D1EAB5CCECD8E4462923323E5816BED2134D54013B937D1A68B3FBB18729C1FDE225C57F8CE080FA828F0067E451A3FD81FA628842B0B7630000000001904E42010000000000000001000000000000004BFA5F372D55B3849049E14BEBCA93758EB36805BAE760A57239976F009A545C01 +- builder: AggregateBondedTransactionBuilder + payload: 000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000006A155E534B494F0CC34AFBB78371B1446EAD405FFFE4B263AA05A43D3167C5D958000000000000005100000000000000ACBD55170BE49239EC69279A3921578B5701CA7AF4A1A5FF87B2B2957120AC870000000001904E424BFA5F372D55B3849049E14BEBCA93758EB36805BAE760A57239976F009A545C0100000000000000 +- builder: AccountMetadataTransactionBuilder + payload: AA0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904441000000000000000001000000000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A000000000000000A000600313233424143 +- builder: AccountMetadataTransactionBuilder + payload: 
AA0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904441000000000000000001000000000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A000000000000000A000600313233424143 +- builder: AggregateBondedTransactionBuilder + payload: 080100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000AD9E3FFDF99E080F6715FE208A9D96A09B39A3BD7B065AF7F6B11827F1EEE1DF60000000000000005A00000000000000FA8EC085AE64CF30E44ADD18A3133D9B2190F9A20C08667A5EF44E5E9962E72000000000019044419083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A000000000000000A000600313233424143000000000000 +- builder: AccountAddressRestrictionTransactionBuilder + payload: B80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E600000000019050410000000000000000010000000000000001000101000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C990B387A39C0E4607DB7056EEAAF0A0EF43B45C667EB790FF +- builder: AccountAddressRestrictionTransactionBuilder + payload: B80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E600000000019050410000000000000000010000000000000001000101000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C990B387A39C0E4607DB7056EEAAF0A0EF43B45C667EB790FF +- builder: AggregateBondedTransactionBuilder + payload: 
100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000004DC6F0524C486D78A6D9D775F5508C0362125420728D03DE74435EB1E3778891680000000000000068000000000000005085C164D5D55CD6AF5A4FDABF88D1A2EE2C1C1422D431BAFDC14714ED63E3F5000000000190504101000101000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C990B387A39C0E4607DB7056EEAAF0A0EF43B45C667EB790FF +- builder: AggregateBondedTransactionBuilder + payload: 6001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000190414200000000000000000100000000000000B4C97320255A2F755F6BE2F4DDAC0BB3EBDD25508DBE460EA6988366F404706AB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190E8FEBD671DD41BEE94EC3BA5831CB608A312C2F203BA840D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000 +- builder: AggregateBondedTransactionBuilder + payload: 
30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222 +- builder: AggregateBondedTransactionBuilder + payload: 60010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000 +- builder: AggregateBondedTransactionBuilder + payload: 
30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222 +- builder: HashLockTransactionBuilder + payload: B800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002134E47AEE6F2392A5B3D1238CD7714EABEB739361B7CCF24BAE127F10DF17F200000000019048410000000000000000010000000000000044B262C46CEABB85809698000000000064000000000000008498B38D89C1DC8A448EA5824938FF828926CD9F7747B1844B59B4B6807E878B +- builder: HashLockTransactionBuilder + payload: B800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002134E47AEE6F2392A5B3D1238CD7714EABEB739361B7CCF24BAE127F10DF17F200000000019048410000000000000000010000000000000044B262C46CEABB85809698000000000064000000000000008498B38D89C1DC8A448EA5824938FF828926CD9F7747B1844B59B4B6807E878B +- builder: 
AggregateBondedTransactionBuilder + payload: 100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000009A5B5EAFD005A7A45417F12E70BD568E510597BF23C54174CB4D9F5D9FF3165D68000000000000006800000000000000CE4046D3C66C81725042A1C54A584A00DC171F065EA75603B8799A273735DA72000000000190484144B262C46CEABB85809698000000000064000000000000008498B38D89C1DC8A448EA5824938FF828926CD9F7747B1844B59B4B6807E878B +- builder: AccountKeyLinkTransactionBuilder + payload: A10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904C4100000000000000000100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E601 +- builder: AccountKeyLinkTransactionBuilder + payload: A10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904C4100000000000000000100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E601 +- builder: AggregateBondedTransactionBuilder + payload: 000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000584F02318224C48C34CB4E321F293EA6BA2A12EF68E82713003F0AC31A8AD34A58000000000000005100000000000000AE4A52DDFF64ECDEB4B017228B5411DDFFB1474ED7B7C7D2671C71B8D789F3EB0000000001904C41F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60100000000000000 +- builder: MultisigAccountModificationTransactionBuilder + payload: 
B80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001905541000000000000000001000000000000000102010100000000905ED2343582DFB4D14DC837BF18E3C9BE5271FF9B8A9EC1908760369DC78761E7EBCC6CFAEA44EE946ED0637B67EE55 +- builder: MultisigAccountModificationTransactionBuilder + payload: B80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001905541000000000000000001000000000000000102010100000000905ED2343582DFB4D14DC837BF18E3C9BE5271FF9B8A9EC1908760369DC78761E7EBCC6CFAEA44EE946ED0637B67EE55 +- builder: AggregateBondedTransactionBuilder + payload: 100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000A7197AB28F6EACC591B5D16D77BA56160BDA7827B159C0586DF6152802C9CFEB680000000000000068000000000000003ACBD9C4989E7DDFD0AAB3E3CD014DF6A8F301E6FF63874A50981DF75EEF86DC00000000019055410102010100000000905ED2343582DFB4D14DC837BF18E3C9BE5271FF9B8A9EC1908760369DC78761E7EBCC6CFAEA44EE946ED0637B67EE55 +- builder: AccountOperationRestrictionTransactionBuilder + payload: 8C0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190504300000000000000000100000000000000044001010000000052425441 +- builder: AccountOperationRestrictionTransactionBuilder + payload: 
8C0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190504300000000000000000100000000000000044001010000000052425441 +- builder: AggregateBondedTransactionBuilder + payload: E80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A00000000000000010000000000000097B9CB6B4A5C7EF2B809322AD34DE85BB6CBB2F1A6A2265A77CE3C5276E9CDAE40000000000000003C0000000000000010DE38DE7D9DB4B9DB52BC61191A6EF5F835DF56459E25E279C72194E03B1F37000000000190504304400101000000005242544100000000 +- builder: NamespaceMetadataTransactionBuilder + payload: B20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904443000000000000000001000000000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A00000000000000E8030000000000000A000600414243313233 +- builder: NamespaceMetadataTransactionBuilder + payload: B20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904443000000000000000001000000000000009083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A00000000000000E8030000000000000A000600414243313233 +- builder: AggregateBondedTransactionBuilder + payload: 
100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000F12AD5DBD30E62E64BCE9404809CBB4F741BE711520F3FB17764760E91570EE2680000000000000062000000000000005E541F8B249C363CDFA0188C88A1E26DB75F30A366A05AA0D6400E9DC41D025700000000019044439083025FF3A8AB5AD104631FB370F290004952CD1FDDC4C90A00000000000000E8030000000000000A000600414243313233000000000000 +- builder: MosaicSupplyChangeTransactionBuilder + payload: 91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904D42000000000000000001000000000000008869746E9B1A70570A0000000000000001 +- builder: MosaicSupplyChangeTransactionBuilder + payload: 91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904D42000000000000000001000000000000008869746E9B1A70570A0000000000000001 +- builder: AggregateBondedTransactionBuilder + payload: F00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000D0A68EB4096C1589E531F5B11091ADB71A4D93DE23C7A52A97FE49CDEF35C1E848000000000000004100000000000000CC3E835D179670514BE008521130441316AFD4FB09050377B850C612715994050000000001904D428869746E9B1A70570A000000000000000100000000000000 +- builder: SecretLockTransactionBuilder + payload: 
D100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456B2400000000019052410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A3FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE44B262C46CEABB858096980000000000640000000000000000 +- builder: SecretLockTransactionBuilder + payload: D100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456B2400000000019052410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A3FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE44B262C46CEABB858096980000000000640000000000000000 +- builder: AggregateBondedTransactionBuilder + payload: 300100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000DDF18516A2274AB73210FEF6D7AFA32212678E709475986BD067ED688D6E4B858800000000000000810000000000000083703D89B194758D2119B4A6EE56C2544120C9FA6CADE47103AE7FA7875570C0000000000190524190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A3FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE44B262C46CEABB85809698000000000064000000000000000000000000000000 +- builder: SecretLockTransactionBuilder + payload: 
D10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019052410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A3FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE44B262C46CEABB858096980000000000640000000000000000 +- builder: SecretLockTransactionBuilder + payload: D10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019052410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A3FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE44B262C46CEABB858096980000000000640000000000000000 +- builder: AggregateBondedTransactionBuilder + payload: 300100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000000A05CEB203C6BAC9DB1548077121C7F7A885AFDC463F300DD0976653570FD5BB8800000000000000810000000000000054DE12E65453D160A3F52794BC32789AC4A3E97ADD0F7967ADF84B0D90738BBA000000000190524190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A3FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE44B262C46CEABB85809698000000000064000000000000000000000000000000 +- builder: MosaicAddressRestrictionTransactionBuilder + payload: B800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6000000000190514200000000000000000100000000000000010000000000000001000000000000000900000000000000080000000000000090D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1204EA4 +- 
builder: MosaicAddressRestrictionTransactionBuilder + payload: B800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6000000000190514200000000000000000100000000000000010000000000000001000000000000000900000000000000080000000000000090D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1204EA4 +- builder: AggregateBondedTransactionBuilder + payload: 100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000FCF315E8CB3EC984365EB7AB7B3CED8816E62C0F1E329C01D80919590CB21743680000000000000068000000000000002A5029790873C060C001F01FDEC0FC51D9DFE6501A14D28CD2EAC5E711E0CB170000000001905142010000000000000001000000000000000900000000000000080000000000000090D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1204EA4 +- builder: SecretProofTransactionBuilder + payload: BF000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001905242000000000000000001000000000000009022D04812D05000F96C283657B0C17990932BC84926CDE63FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE0400009A493664 +- builder: SecretProofTransactionBuilder + payload: BF000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001905242000000000000000001000000000000009022D04812D05000F96C283657B0C17990932BC84926CDE63FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE0400009A493664 +- builder: AggregateBondedTransactionBuilder + payload: 
180100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000009E82B0F493ABF3FA0C8635C0849025A980EEB66236FF81B6220814D06424AF7F70000000000000006F00000000000000DE9B1F9A22014D3E7BC0B336B6D397325D5ABF24A9100B00282C11DD1160730D00000000019052429022D04812D05000F96C283657B0C17990932BC84926CDE63FC8BA10229AB5778D05D9C4B7F56676A88BF9295C185ACFC0F961DB5408CAFE0400009A49366400 +- builder: MosaicDefinitionTransactionBuilder + payload: 96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904D410000000000000000010000000000000000000000000000001027000000000000000000000504 +- builder: MosaicDefinitionTransactionBuilder + payload: 96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904D410000000000000000010000000000000000000000000000001027000000000000000000000504 +- builder: AggregateBondedTransactionBuilder + payload: F00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000D7C1075FF2D14C78AD1CB5974BB53370DB3F964BBB0E2DB693E4FDB1F23827F748000000000000004600000000000000AF906081C12C065AE0DF8AE5822513A5A5E1046D86C272A42A3283E9DFF9C9D30000000001904D41000000000000000010270000000000000000000005040000 +- builder: NamespaceRegistrationTransactionBuilder + payload: 
9E0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904E410000000000000000010000000000000010270000000000007EE9B3B8AFDF53C0000C6E65776E616D657370616365 +- builder: NamespaceRegistrationTransactionBuilder + payload: 9E0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904E410000000000000000010000000000000010270000000000007EE9B3B8AFDF53C0000C6E65776E616D657370616365 +- builder: AggregateBondedTransactionBuilder + payload: F80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000000B98E5B3791D95C41B943E480FD744480DB058CE5C1E80F57BCFA32D5CD09BC750000000000000004E000000000000001E6132AA03A256A450AD0A12BC8A0560584FCBD79BEC86AA502521E5EEA35DF60000000001904E4110270000000000007EE9B3B8AFDF53C0000C6E65776E616D6573706163650000 +- builder: NamespaceRegistrationTransactionBuilder + payload: 9E0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904E41000000000000000001000000000000007EE9B3B8AFDF53400312981B7879A3F1010C7375626E616D657370616365 +- builder: NamespaceRegistrationTransactionBuilder + payload: 
9E0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904E41000000000000000001000000000000007EE9B3B8AFDF53400312981B7879A3F1010C7375626E616D657370616365 +- builder: AggregateBondedTransactionBuilder + payload: F80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A0000000000000001000000000000000897D255849178FAFB1CA484FBFA9AAB023EE41880B445B4F598A207299022B650000000000000004E00000000000000E940F3DBCD87D52455691DB6E54F1ACDE7030DC2B6895A7562C4CF41BE2A8B5B0000000001904E417EE9B3B8AFDF53400312981B7879A3F1010C7375626E616D6573706163650000 +- builder: TransferTransactionBuilder + payload: C10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A010002000000000064000000000000000200000000000000C800000000000000010000000000000000 +- builder: TransferTransactionBuilder + payload: C4000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001905441000000000000000001000000000000009151776168D24257D80000000000000000000000000000001400010000000000672B0000CE560000640000000000000000536F6D65204D65737361676520E6BCA2E5AD97 +- builder: TransferTransactionBuilder + payload: 
C4000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001905441000000000000000001000000000000009151776168D24257D80000000000000000000000000000001400010000000000672B0000CE560000640000000000000000536F6D65204D65737361676520E6BCA2E5AD97 +- builder: AggregateBondedTransactionBuilder + payload: 200100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000E1BC0003684C91F467AD20A6312AFC8FC1E578B633DDC53AF0C7CED047C4339378000000000000007400000000000000C613FD58985D6BBEDFD9BDFD1CA9D8C72B5B30E4A3BC99114D0BFCAB133E395A00000000019054419151776168D24257D80000000000000000000000000000001400010000000000672B0000CE560000640000000000000000536F6D65204D65737361676520E6BCA2E5AD9700000000 +- builder: TransferTransactionBuilder + payload: B10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0100010000000000672B0000CE560000640000000000000000 +- builder: TransferTransactionBuilder + payload: B10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0100010000000000672B0000CE560000640000000000000000 +- builder: AggregateBondedTransactionBuilder + payload: 
100100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000CF4DE1675E204A2D06C2B468F37EE71DC4DF918CB0A6D6783FBD142AD07F774C68000000000000006100000000000000AF07FF69FF95F4FCC5B83CC750EC20E37F2E9968644FA18D603A3F1CDC6A7E46000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0100010000000000672B0000CE56000064000000000000000000000000000000 +- builder: TransferTransactionBuilder + payload: D10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0100030000000000BA36BD286FB7F2670300000000000000D787D9329996A177020000000000000029CF5FD941AD25D5010000000000000000 +- builder: TransferTransactionBuilder + payload: 
29020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001905441000000000000000006DC7B9B7501000090E5740C367E76398B61D4D1216777E8DE1DC3BC25D12E668901000000000000FE4532303137333537363138303241464537323734393635324138374136434533363439453836453145443533443331453935413333453630443945303644354338414535454637373137323335453039363246303133444532343537304331323946314533383432344430413542334434384237364233464634393433414241434544393437383231373637314644464430323730443046384344353437384430313734313530453246433930393638363242383945374332384444373246463144353334413938313843353142343833303246324146463031433030454238443445444641334637413142353842453437433234443644454338304537394132454243303731343243453038434330454633453535424231334538324436313042423642353546393335413936383132333543453941304543433636423143314138334643414639334434353546313943383537364631443342384336433134344637414135414136434234444333333242443035314441394233434541304330363437464646 +- builder: TransferTransactionBuilder + payload: BD000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001905441000000000000000001000000000000009188DD7D72227ECAE70000000000000000000000000000000D0001000000000044B262C46CEABB85010000000000000000746573742D6D657373616765 +- builder: TransferTransactionBuilder + payload: 
D400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002134E47AEE6F2392A5B3D1238CD7714EABEB739361B7CCF24BAE127F10DF17F200000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A1400020000000000671305C6390B00002C01000000000000672B0000CE560000640000000000000000536F6D65204D65737361676520E6BCA2E5AD97 +- builder: TransferTransactionBuilder + payload: D400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002134E47AEE6F2392A5B3D1238CD7714EABEB739361B7CCF24BAE127F10DF17F200000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A1400020000000000671305C6390B00002C01000000000000672B0000CE560000640000000000000000536F6D65204D65737361676520E6BCA2E5AD97 +- builder: AggregateBondedTransactionBuilder + payload: 300100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A00000000000000010000000000000045D0CD037E228D93D8D4A3CFFCBE09A7308F9E9B3B357B1D7DD55E80B48111CF88000000000000008400000000000000C898C0939F037B6F2D0ECCA8BC80783B6901ED744ADDC1B0C985C9CC27A11B7E000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A1400020000000000671305C6390B00002C01000000000000672B0000CE560000640000000000000000536F6D65204D65737361676520E6BCA2E5AD9700000000 +- builder: TransferTransactionBuilder + payload: B00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0000010000000000672B0000CE5600006400000000000000 +- builder: 
TransferTransactionBuilder + payload: B00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0000010000000000672B0000CE5600006400000000000000 +- builder: AggregateBondedTransactionBuilder + payload: 080100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000A733A9FB12496D8E8BA9907E7395BE54E2EC8AA9DE0915B3F8C14E38F96A678E60000000000000006000000000000000216152AB1229A8CC65811F325B9852EA28C1BC223ABCF2FD10A9845B797DA9DB000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0000010000000000672B0000CE5600006400000000000000 +- builder: MosaicGlobalRestrictionTransactionBuilder + payload: AA00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6000000000190514100000000000000000100000000000000800D00000000000002000000000000000100000000000000090000000000000008000000000000000106 +- builder: MosaicGlobalRestrictionTransactionBuilder + payload: AA00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6000000000190514100000000000000000100000000000000800D00000000000002000000000000000100000000000000090000000000000008000000000000000106 +- builder: AggregateBondedTransactionBuilder + payload: 
080100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000EAB01ABB1677368FAC2F6BF706A28ED8B659F40ECD0D1BD3BC718D9BF83108CD60000000000000005A00000000000000687FE391241D67448B0E02C5D34E09A4EF880509EF8C5B42570749F012AA09B40000000001905141800D00000000000002000000000000000100000000000000090000000000000008000000000000000106000000000000 +- builder: AccountMosaicRestrictionTransactionBuilder + payload: 980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001905042000000000000000001000000000000000200010100000000E803000000000000D007000000000000 +- builder: AccountMosaicRestrictionTransactionBuilder + payload: 980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001905042000000000000000001000000000000000200010100000000E803000000000000D007000000000000 +- builder: AggregateBondedTransactionBuilder + payload: F00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420A000000000000000100000000000000A5D2296871C6DCEAD69D52F96110BC22D26F4E2DE963DCF268AEF848C33668224800000000000000480000000000000016ED504C968BAD1F09EBE902E91CF8D7660381CFC10214FB2D141E8DAEDC91B400000000019050420200010100000000E803000000000000D007000000000000 +- builder: AggregateCompleteTransactionBuilder + payload: 
D0010000000000008C281CF19399A4CD7C97336B73F21D395BF296DB2FF8020A5BFDE51BD3314506C0C4BE23A625E71DEAE87E20E565B7684D5ECD7C941DF9847691834D19652C0830EC782177FFEFEE6B8C2B6C38BDFF7413A7872386D4B8A600E255DFD042090300000000019041410000000000000000010000000000000061A53A5CD380F63A506A1059FE2D13FC0DA712E4B39B217407ECECB5DB7DA60DC0000000000000005D00000000000000F7847D28C15F11FED0C16401DA9F1D3D67E5BE14DD00521CB293D13CD28F06A10000000001905441905969EEEC46B0CC6DD90A4910306425B7FCC458DF884F0B0D0000000000000000746573742D6D6573736167650000005D00000000000000BAC8F60B0467AFCDA153477D2446921543D3C2BEB5E964F26F9AA62D4FB0A916000000000190544190B0A565B66E6D10B8151AD76F7C404016C444C81CD9D5BD0D0000000000000000746573742D6D6573736167650000000000000000000000BAC8F60B0467AFCDA153477D2446921543D3C2BEB5E964F26F9AA62D4FB0A916677E32A0DA9F62FC71BD5728350EEF38BF4968AA052EAED678DEBD17CF099BF0C3A37C6FE4D585392411418A1892530B423DB7F791D17A44781B10EB398E4605 +- builder: AggregateCompleteTransactionBuilder + payload: 7001000000000000D9ADCBC1F31F1370B236510B6A8B184A4A81F4227FFAD2918EDA68827CF36B85BB6991A8AD6FC0D8CE012F2CAED37393CDCAA10C28A08DB8BD3DDB0CF129260D1CF46D84415E6C36658D4A96964A0DB51264CC9CE3C916590C5BDD60A94A89D10000000001904141000000000000000001000000000000008857FF8809FDA3AE529CD0EF67EEE479769BBFF0F46847D7AAED856E02EB391160000000000000005D000000000000000AA72E6B094163E34DDF9F6210EDA4535426E193D3098B47625BB11B016DA454000000000190544190E158EA9578CE96E6989A8B70714257C4DAEDFC80232AFC0D0000000000000000746573742D6D65737361676500000000000000000000004C2AABC45AAEF79B98A752C94ADA39B0B385C485582A1529C33978371BEE2904BFBA7ED597EE29300BEDD4CE05C56E67F8D0ACE8B14E26D62CF5B038A144DE96BFB1048B9776DAD3A45977D20ABA34E7A34103B3B174FFD900E3B5092F8C8F01 +- builder: AggregateCompleteTransactionBuilder + payload: 
7001000000000000D9ADCBC1F31F1370B236510B6A8B184A4A81F4227FFAD2918EDA68827CF36B85BB6991A8AD6FC0D8CE012F2CAED37393CDCAA10C28A08DB8BD3DDB0CF129260D1CF46D84415E6C36658D4A96964A0DB51264CC9CE3C916590C5BDD60A94A89D10000000001904141000000000000000001000000000000008857FF8809FDA3AE529CD0EF67EEE479769BBFF0F46847D7AAED856E02EB391160000000000000005D000000000000000AA72E6B094163E34DDF9F6210EDA4535426E193D3098B47625BB11B016DA454000000000190544190E158EA9578CE96E6989A8B70714257C4DAEDFC80232AFC0D0000000000000000746573742D6D65737361676500000000000000000000004C2AABC45AAEF79B98A752C94ADA39B0B385C485582A1529C33978371BEE2904BFBA7ED597EE29300BEDD4CE05C56E67F8D0ACE8B14E26D62CF5B038A144DE96BFB1048B9776DAD3A45977D20ABA34E7A34103B3B174FFD900E3B5092F8C8F01 +- builder: AggregateCompleteTransactionBuilder + payload: 08010000000000002DEFDA9AE95AF71F4A79A478C47C55600F0FD63C5CC99547118467417F8DAA34E57506E9777E56358043D722BE2DFCA642321D83BE66A539AF31CAA3B395840D07DB7062C734B5CDEDC1C86D0C6A2688892A8831F724216B5D0E799742F6DCA6000000000190414100000000000000000100000000000000484A6E128BA0CCFBE1A8111FEB9341B3BAE4B04B7C220BD45316822C52A00EBC60000000000000005D00000000000000E0B29B19E4FE4CF99155ECF9204A0250DA09F62FDEFDCA6BBD82D1D36046A2D8000000000190544190D474E287F744DDCAB6860ACBCB3DF91D616424210D06710D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 080100000000000095F997295188DF9898B0A5E8DADBC78B205FFA0401A6621C0EEDCECD53471C09CFC626C19D630B0EC3BEC007084B39A5057DC143C20717CF46136BCE8A8B020332060636AB9CFDFBDF49DFD6E205C32BF6E5EF880F6ACD6FE5563304010028CC000000000190414100000000000000000100000000000000052579A0684C80256E2F7400283C3676545378A6A1472F7B2784A62B4E7D718260000000000000005D0000000000000032060636AB9CFDFBDF49DFD6E205C32BF6E5EF880F6ACD6FE5563304010028CC000000000190544190ABBEFE56A225780E565ED98CFAD142993631F3A43DF6BF0D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 
380200000000000097235CFDC7ABA2500CF0A349F25EB607046E3CCBBF1A1DB5C6E5CE40426FD686E64861CF9A412E8254ADF5E2A3AA8BCA3BDBE40E480A0C148CBC1E30F7FC2E0BE02E67EF8B71B8D6521C5B3B7CB02431B7A97054DA276B139F9D2F3E3644EF8800000000019041410000000000000000010000000000000000419BCE435AC874552B3D0706E6DFC34533FBA60F9BFA281A67A6A4DAF1AC16C0000000000000005D00000000000000A52D9ABD9B2DD81E65E3ECE86D8EA2758B185657AEA543EC14C4C82EBC835211000000000190544190E5E0E301E4F0938A247B83179AF54527D48DFB662BBA2B0D0000000000000000746573742D6D6573736167650000005D0000000000000099A38966220E8B3F50466FC126700D5ECF56EEC8C9C65CDB884CE36ADB31585B000000000190544190E5E0E301E4F0938A247B83179AF54527D48DFB662BBA2B0D0000000000000000746573742D6D657373616765000000000000000000000099A38966220E8B3F50466FC126700D5ECF56EEC8C9C65CDB884CE36ADB31585BC83BA60B6A86EB03FEBA96CCD325ECE600917B2CFFF09432F79C4FB9C7447F4AA7D352B67C9339F76CF4A4C5EA412C7816114782D87D159CB95E020134C8980000000000000000007FB15317341AE0053BDBAF72AFD42B275FAE6F442CEBD59590388E348F8510B0E6CC6741E175700D529446022EA0220E948AA012A182011BC70056702186086C5EA97E707E75A65FF9F19598262068FD1B1CF8156926F45C95F2FEEC6DAE5700 +- builder: AggregateCompleteTransactionBuilder + payload: 08010000000000003B4528319A3087FFB7FD5F4D8D45FBAE2F5450885AF398A9048A747F3718A97B31B358623D9465910AD6C8116696DD6E617B85E7AC4729905DCA3E226D39480351A4458636D21A5DDC33034574E43DE8B7FB9097418B0B41DA24312590355E88000000000190414100000000000000000100000000000000B1C41CBC5C8FF9B68AF648D90B63D563EDA33DFE3C5638CD3DB5BCED1F78AA9660000000000000005D000000000000006A5B17934641B25F8A6D44763AD06F951DC9DD8A7E9FDECC424C13B5D7EDB6F40000000001905441900EE6EAF7DBC50AACA1967B3D822EADB4773810D06A79910D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 
08010000000000003B4528319A3087FFB7FD5F4D8D45FBAE2F5450885AF398A9048A747F3718A97B31B358623D9465910AD6C8116696DD6E617B85E7AC4729905DCA3E226D39480351A4458636D21A5DDC33034574E43DE8B7FB9097418B0B41DA24312590355E88000000000190414100000000000000000100000000000000B1C41CBC5C8FF9B68AF648D90B63D563EDA33DFE3C5638CD3DB5BCED1F78AA9660000000000000005D000000000000006A5B17934641B25F8A6D44763AD06F951DC9DD8A7E9FDECC424C13B5D7EDB6F40000000001905441900EE6EAF7DBC50AACA1967B3D822EADB4773810D06A79910D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 700100000000000069AAA1BFC3E1B1507B7D1D2EF68EE62EA02522B86D702F076D8C11B41A7FD0860DDE5450CD3D4D584DF164CF5AF3A777FD3DBCA1C12C54DB6CF26813FF74CC0DF61A834C1C9D4F132AC38CC9AF46E4ABC82D0AB7DAD9280CE9CB69F9EE8AAA23000000000190414100000000000000000100000000000000FB482B2C78876E91BD8EBDFB58B1E59A949134CB50522F8F8470C5613A42020460000000000000005D000000000000000B30AFBE29CDD0B8BD191CCAA97EF787F26A5D822A2A1FC95041959017700809000000000190544190FA036C22E0BD9964DE6F7728D8A10153E4F9CA2225F8610D0000000000000000746573742D6D657373616765000000000000000000000095C68125575891B5F403259E2AAFEC1D7CCA9A750AF462900BEFF31B16351754A232CA3F026E5FD73B56695C15E710AD66B3120009A2B0FB62CAA72B55F4FBFBD90598D56D9CC55DD9C97DC54D88BE3F7C117BD5CC2AF9CD4E577CE3062E250C +- builder: AggregateCompleteTransactionBuilder + payload: 
700100000000000069AAA1BFC3E1B1507B7D1D2EF68EE62EA02522B86D702F076D8C11B41A7FD0860DDE5450CD3D4D584DF164CF5AF3A777FD3DBCA1C12C54DB6CF26813FF74CC0DF61A834C1C9D4F132AC38CC9AF46E4ABC82D0AB7DAD9280CE9CB69F9EE8AAA23000000000190414100000000000000000100000000000000FB482B2C78876E91BD8EBDFB58B1E59A949134CB50522F8F8470C5613A42020460000000000000005D000000000000000B30AFBE29CDD0B8BD191CCAA97EF787F26A5D822A2A1FC95041959017700809000000000190544190FA036C22E0BD9964DE6F7728D8A10153E4F9CA2225F8610D0000000000000000746573742D6D657373616765000000000000000000000095C68125575891B5F403259E2AAFEC1D7CCA9A750AF462900BEFF31B16351754A232CA3F026E5FD73B56695C15E710AD66B3120009A2B0FB62CAA72B55F4FBFBD90598D56D9CC55DD9C97DC54D88BE3F7C117BD5CC2AF9CD4E577CE3062E250C +- builder: AggregateCompleteTransactionBuilder + payload: F8000000000000007628D261A89D8281BB5202D55FAD2977D7439059F2F1B971AFF6719236E623670DD0EB74F5B3FB8F540E1FA2CB08EA82A3FD637697BD01690875792FC4D8A707D4C20FDCB197500B29599940BC84BE2F5DCEF68853F1E7094CAD9F8B84C61CC800000000019041410000000000000000010000000000000083F5ABA0102618F4837F28421819803E42A79B70C89E583D1784FFB7D19DF71D500000000000000050000000000000002DF750A9F35247EEDF21AB13264C46B2756E915060E48A21B0C7107A837188730000000001905541010100010000000090FB0C0BB172BAB2EC30776A5A910FCB68AF09FCB14EF1D0 +- builder: AggregateCompleteTransactionBuilder + payload: 
700100000000000087FEA49B4308A286A050690C1E079980E69E9E65B1F99E39194317A444D37DFE82EBC2EB95823D586726CA55B4097EFD1285D38D1E9BBD0632772A8F3B5C980AA6827555599AC64F1CF33CEF784A4E04039118971D0DFC39149317E43AE1B810000000000190414100000000000000000100000000000000921DDFF1DD6B7358B9C4A52BB4CC9BF0A1E2CC91F6CBCBDEA24B662ABB012A0960000000000000005D00000000000000E822E490EF2A32A81821DCD970AD2C9E3B348BEC246EFAB71E75C72DBCE98AF5000000000190544190D0DC0125F0A87096ABA3B9781A3EEF60813546A182A6520D0000000000000000746573742D6D6573736167650000000000000000000000D8B2A466CF8967CFA5C9D8A9989FFE7E620E8A84D00696ACFB80FBFABB83FDD2EF36E6D8B4A339E12E77C857DF7462A319DC41D099F63805A85CD6AF7EE44D4591F54B3A5B0FFC61BC6DC98BF461CC59FE4F364E425A8D22674151A78FC3C104 +- builder: AggregateCompleteTransactionBuilder + payload: 68010000000000009EF4DBFD12C05BE89ECE867D36C6498032DE4B5550DA0ACF437D46A14609C7378A8C4D3B4FBD37DFD2C9FCEC7B3CEB1D33A0AE1409932A06EB199930CB65810F0675600F69145174A73300C71B35DF199C94B4EAD66E51F83A54F6C78FC2B7A70000000001904141000000000000000001000000000000009E918C130953B0948AE3133039DBFE27FF6FDBB5F0FF2837BB647D329AD3A7EDC0000000000000005D00000000000000F6E9D20ED1F24CC2329AE7FC32641D561BEB0E79938F2EEA193CB831D00320830000000001905441905004D6A462DDC8C0AA5F0F27A47B1BDC8931D3FD7665CA0D0000000000000000746573742D6D6573736167650000005D000000000000000675600F69145174A73300C71B35DF199C94B4EAD66E51F83A54F6C78FC2B7A7000000000190544190962E087BD1C13B6180573AF6EF04A10598F298AFEE5BF90D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 
08010000000000003E8ADFC708C78D43A23E34647FAC45396B0AAB4FE76392BCB5E9DA48DEB59C01799629A4E49A9E4AAF3201525DA44902217DAF85F530701F3711164CC711010F44D2E54915FD2ECAF79B8895C4B454B4245B2509207EC8C490A66BD699D2D1E00000000001904141000000000000000001000000000000008A50B8F1A89BFC88B5459514935CF03A42590EF0B5ABA08C3516EEC54B984A8960000000000000005D000000000000006CADBD0098519C6BA27C49927DC1B626960F135FF5824293FFAF42066F8C8233000000000190544190705902102E897D8887478A61F0BAD5EE607061E315BB610D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 680100000000000032D56CCF5DD17E5CB0BBD36709BB81C40C039A4357D073FB12D2E4B7723E78AD2BF8F11F061CDCB7084F19447BE0C0787B6E6440857E62519D28877F0F1A1602A1DE029466D3728ABA901B2F0BB1E73A70524537189F576AB9ED52FAB41959AF000000000190414100000000000000000100000000000000D6B4830385A4701925CBF4414829BC8E92696A70A06BF70BCF70CA8C3F99162CC0000000000000005D00000000000000CB0778C7D596F0AD608F479E686EDB284A7330291A31E25070508898AE57EDCC000000000190544190F55C794DFAA192E66AC8A57606AD1DE9CD10FE60A9D8000D0000000000000000746573742D6D6573736167650000005D00000000000000A1DE029466D3728ABA901B2F0BB1E73A70524537189F576AB9ED52FAB41959AF0000000001905441909D4AA4C68CD857EAFA8A28F371DCAD37A85415A3642F690D0000000000000000746573742D6D657373616765000000 +- builder: AggregateCompleteTransactionBuilder + payload: 
D00100000000000032D56CCF5DD17E5CB0BBD36709BB81C40C039A4357D073FB12D2E4B7723E78AD2BF8F11F061CDCB7084F19447BE0C0787B6E6440857E62519D28877F0F1A1602A1DE029466D3728ABA901B2F0BB1E73A70524537189F576AB9ED52FAB41959AF000000000190414100000000000000000100000000000000D6B4830385A4701925CBF4414829BC8E92696A70A06BF70BCF70CA8C3F99162CC0000000000000005D00000000000000CB0778C7D596F0AD608F479E686EDB284A7330291A31E25070508898AE57EDCC000000000190544190F55C794DFAA192E66AC8A57606AD1DE9CD10FE60A9D8000D0000000000000000746573742D6D6573736167650000005D00000000000000A1DE029466D3728ABA901B2F0BB1E73A70524537189F576AB9ED52FAB41959AF0000000001905441909D4AA4C68CD857EAFA8A28F371DCAD37A85415A3642F690D0000000000000000746573742D6D6573736167650000000000000000000000CB0778C7D596F0AD608F479E686EDB284A7330291A31E25070508898AE57EDCC16AE9A3F67BB5B3D6C613E5B67E3DDBD8B771F64A973ACF71DC072CC3F6EEE7C7900FCF02472353E39A80F4F4180C38580227C92E882EDD89A0629F88A37BA0A +- builder: TransferTransactionBuilder + payload: BD0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019054410000000000000000010000000000000090E342239BF195E01384EB6EC353639D4A687A29466A76680D00010000000000672B0000CE560000640000000000000000536F6D65204D657373616765 +- builder: TransferTransactionBuilder + payload: BD00000000000000182AF94BD22DF48D81EE9CD758CA14D79FEA9362DBF5339325A8244F3EB8DF20E6432DA8FA9E0B2B5F2AB8A815CE29453795B6298B90F46959EC1FAA377A880C2134E47AEE6F2392A5B3D1238CD7714EABEB739361B7CCF24BAE127F10DF17F2000000000190544100000000000000000100000000000000906E4FC9E4A56B4E70F7FA30D06D92D1FAF62B576905C9530D00010000000000672B0000CE560000640000000000000000536F6D65204D657373616765 +- builder: TransferTransactionBuilder + payload: 
BD00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000190544100000000000000000100000000000000906E4FC9E4A56B4E70F7FA30D06D92D1FAF62B576905C9530D00010000000000672B0000CE560000640000000000000000536F6D65204D657373616765 +- builder: AggregateCompleteTransactionBuilder + payload: D8010000000000006B71F5C71EF109939916DBA83924679B93F3CF72F084AD499A0FCD3E75B1CA81F97E43E18B23830771909FF920BB69FEB11160E6230C080C57C196C095B8C80137962F00E294752191B622118BDE0BBA4E1837711C62303A02AC394CC50384B4000000000190414100000000000000000100000000000000208D00C01487C8DE8ED820DF2C8BEEA36522E5AD8FC8554021D0CCE0BBCD61A160000000000000005D00000000000000B694186EE4AB0558CA4AFCFDD43B42114AE71094F5A1FC4A913FE9971CACD21D000000000190544190915B01E1448E50040D29C354D440C56B799F8BE87F34430D0000000000000000746573742D6D657373616765000000000000000000000048053805A88A714194E24E520D2416DDBBE1CD120C36BE7EB0BD4F9B39BB8A0C9B80C94CF5EA4928A11ACAE561292EB03FA43942EB48F0C1D30D7F67262D001C07A6E3741C1A08EF8CDACC849D1893B75D93D8DA2C13DC4A9B08BC032979E6060000000000000000A980D122695475FC24456100579C3030BE0AFC1551E0ED53B66BDA1847AF9C5A9005335A1D770BFB14F8CF7CD04296C868E7B93D01724775D36AE22983E586D9BCAB0EC59E0AC61F1539F47E2C993D32A55CAFA847E4294C84061B28D5F7510C +- builder: AggregateCompleteTransactionBuilder + payload: 1001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000190414100000000000000000100000000000000F773DA9734E852B72FD07B6F5A0FCC3E5B4E502F91959C413E148E3349BBA6C56800000000000000610000000000000067C2EC49DF614757B22153909EEF2FFF09D5EA5CF3DB4B7B414C11083A76829B000000000190544190E3956684D4B7A9A1F8E3F3D78D83441162D0300D563A48010001000000000044B262C46CEABB8580969800000000000000000000000000 +- builder: 
AggregateCompleteTransactionBuilder + payload: D8010000000000006B71F5C71EF109939916DBA83924679B93F3CF72F084AD499A0FCD3E75B1CA81F97E43E18B23830771909FF920BB69FEB11160E6230C080C57C196C095B8C80137962F00E294752191B622118BDE0BBA4E1837711C62303A02AC394CC50384B4000000000190414100000000000000000100000000000000208D00C01487C8DE8ED820DF2C8BEEA36522E5AD8FC8554021D0CCE0BBCD61A160000000000000005D00000000000000B694186EE4AB0558CA4AFCFDD43B42114AE71094F5A1FC4A913FE9971CACD21D000000000190544190915B01E1448E50040D29C354D440C56B799F8BE87F34430D0000000000000000746573742D6D657373616765000000000000000000000048053805A88A714194E24E520D2416DDBBE1CD120C36BE7EB0BD4F9B39BB8A0C9B80C94CF5EA4928A11ACAE561292EB03FA43942EB48F0C1D30D7F67262D001C07A6E3741C1A08EF8CDACC849D1893B75D93D8DA2C13DC4A9B08BC032979E6060000000000000000A980D122695475FC24456100579C3030BE0AFC1551E0ED53B66BDA1847AF9C5A9005335A1D770BFB14F8CF7CD04296C868E7B93D01724775D36AE22983E586D9BCAB0EC59E0AC61F1539F47E2C993D32A55CAFA847E4294C84061B28D5F7510C +- builder: AggregateCompleteTransactionBuilder + payload: 1001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000190414100000000000000000100000000000000F773DA9734E852B72FD07B6F5A0FCC3E5B4E502F91959C413E148E3349BBA6C56800000000000000610000000000000067C2EC49DF614757B22153909EEF2FFF09D5EA5CF3DB4B7B414C11083A76829B000000000190544190E3956684D4B7A9A1F8E3F3D78D83441162D0300D563A48010001000000000044B262C46CEABB8580969800000000000000000000000000 +- builder: AccountAddressRestrictionTransactionBuilder + payload: A0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8504100000000000000002B0200000000000001000100000000008026D27E1D0A26CA4E316F901E23E55C8711DB20DFBE8F3A +- builder: 
AccountMosaicRestrictionTransactionBuilder + payload: 90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8504200000000000000002B0200000000000002000100000000004CCCD78612DDF5CA +- builder: AccountOperationRestrictionTransactionBuilder + payload: 8A000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8504300000000000000002B0200000000000004400100000000004E42 +- builder: AddressAliasTransactionBuilder + payload: A1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84E4200000000000000002B020000000000002AD8FC018D9A49E18026D27E1D0A26CA4E316F901E23E55C8711DB20DFBE8F3A01 +- builder: MosaicAliasTransactionBuilder + payload: 91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84E4300000000000000002B020000000000002AD8FC018D9A49E14CCCD78612DDF5CA01 +- builder: MosaicDefinitionTransactionBuilder + payload: 96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84D4100000000000000002B020000000000000100000000000000E803000000000000E6DE84B80003 +- builder: MosaicDefinitionTransactionBuilder + payload: 
96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84D4100000000000000002B0200000000000001000000000000000000000000000000E6DE84B80003 +- builder: MosaicDefinitionTransactionBuilder + payload: 96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84D4100000000000000002B0200000000000001000000000000000000000000000000E6DE84B80003 +- builder: MosaicDefinitionTransactionBuilder + payload: 96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84D4100000000000000002B0200000000000001000000000000000000000000000000E6DE84B80003 +- builder: MosaicDefinitionTransactionBuilder + payload: 96000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84D4100000000000000002B0200000000000001000000000000000000000000000000E6DE84B80003 +- builder: MosaicSupplyChangeTransactionBuilder + payload: 91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84D4200000000000000002B020000000000004CCCD78612DDF5CA0A0000000000000001 +- builder: TransferTransactionBuilder + payload: 
BD000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8544100000000000000002B020000000000008026D27E1D0A26CA4E316F901E23E55C8711DB20DFBE8F3A0D0001000000000044B262C46CEABB8500E1F5050000000000746573742D6D657373616765 +- builder: SecretLockTransactionBuilder + payload: D1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8524100000000000000002B02000000000000809D7AE580CA673E2365F9019C19A68FFE3F59F38910CD469B3155B37159DA50AA52D5967C509B410F5A36A3B1E31ECB5AC76675D79B4A5E44B262C46CEABB850A00000000000000640000000000000000 +- builder: SecretProofTransactionBuilder + payload: DB000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8524200000000000000002B0200000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05C9B3155B37159DA50AA52D5967C509B410F5A36A3B1E31ECB5AC76675D79B4A5E200000B778A39A3663719DFC5E48C9D78431B1E45C2AF9DF538782BF199C189DABEAC7 +- builder: MultisigAccountModificationTransactionBuilder + payload: A0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8554100000000000000002B020000000000000102010000000000809FC4844A5206CFA44603EFA1FFC76FE9B0564D96735562 +- builder: AggregateBondedTransactionBuilder + payload: 
08010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8414200000000000000002B02000000000000887DE1026EA57A350FF35BD13163D4C8D5E149A3DC281D3686400AD2906D15BC60000000000000005D000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A854418026D27E1D0A26CA4E316F901E23E55C8711DB20DFBE8F3A0D0000000000000000746573742D6D657373616765000000 +- builder: AggregateBondedTransactionBuilder + payload: A8000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8414200000000000000002B0200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 +- builder: AccountKeyLinkTransactionBuilder + payload: A1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84C4100000000000000002B020000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B601 +- builder: VrfKeyLinkTransactionBuilder + payload: A1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8434200000000000000002B020000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B601 +- builder: NodeKeyLinkTransactionBuilder + payload: 
A1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84C4200000000000000002B020000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B601 +- builder: NamespaceRegistrationTransactionBuilder + payload: A5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A84E4100000000000000002B02000000000000E803000000000000CFCBE72D994BE69B0013726F6F742D746573742D6E616D657370616365 +- builder: MosaicGlobalRestrictionTransactionBuilder + payload: AA000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8514100000000000000002B02000000000000010000000000000000000000000000005C11000000000000000000000000000000000000000000000006 +- builder: MosaicAddressRestrictionTransactionBuilder + payload: B8000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8514200000000000000002B0200000000000001000000000000005C110000000000000000000000000000000000000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05C +- builder: MosaicAddressRestrictionTransactionBuilder + payload: B8000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8514200000000000000002B020000000000006EC265194C0501D45C110000000000000000000000000000000000000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05C +- 
builder: AccountMetadataTransactionBuilder + payload: AE000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8444100000000000000002B0200000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05CE80300000000000001000A0000000000000000000000 +- builder: MosaicMetadataTransactionBuilder + payload: B6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8444200000000000000002B0200000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05CE8030000000000004CCCD78612DDF5CA01000A0000000000000000000000 +- builder: NamespaceMetadataTransactionBuilder + payload: B6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8444300000000000000002B0200000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05CE8030000000000004CCCD78612DDF5CA01000A0000000000000000000000 +- builder: AggregateBondedTransactionBuilder + payload: a80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + comment: Empty Aggregate +- builder: AggregateBondedTransactionBuilder + payload: 
60010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000 + comment: 2 Transactions +- builder: AggregateBondedTransactionBuilder + payload: 60010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000 + comment: 3 Cosignatures +- builder: AggregateBondedTransactionBuilder + payload: 
30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222 + comment: 2 Transactions 2 Cosignatures +- builder: VotingKeyLinkTransactionBuilder + payload: A9000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8434100000000000000002B02000000000000C614558647D02037384A2FECA80ACE95B235D9B9D90035FA46102FE79ECCBA75010000000300000001 +# - builder: AggregateCompleteTransactionBuilder +# payload: 
A8050000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001A8414100000000000000002B02000000000000AADD3F16575A7757B60700DB023526BEDC4F3C8D7123557F21626613F106569B00050000000000005D000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A854418026D27E1D0A26CA4E316F901E23E55C8711DB20DFBE8F3A0D0000000000000000746573742D6D65737361676500000051000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A84C419801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6010000000000000051000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A843429801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6010000000000000051000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A84C429801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B6010000000000000069000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A843413D6BA38329836BFD245489FA3C5700FA6349259D06EAF92ECE2034AA0A33045B013B49349FB9E8832D24858D03A6E0220100000003000000010000000000000055000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A84E41E803000000000000CFCBE72D994BE69B0013726F6F742D746573742D6E616D6573706163650000005A000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A85141010000000000000000000000000000005C1100000000000000000000000000000000000000000000000600000000000068000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A851426EC265194C0501D45C110000000000000000000000000000000000000000000080D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05C66000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A8444280D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05C
E8030000000000004CCCD78612DDF5CA01000A0000000000000000000000000066000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A8444380D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05CE8030000000000004CCCD78612DDF5CA01000A000000000000000000000000005E000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A8444180D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05CE80300000000000001000A0000000000000000000000000041000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A84E434CCCD78612DDF5CA4CCCD78612DDF5CA01000000000000008B000000000000009801508C58666C746F471538E43002B85B1CD542F9874B2861183919BA8787B60000000001A8524280D66C33420E5411995BACFCA2B28CF1C9F5DD7AB1A9C05C9B3155B37159DA50AA52D5967C509B410F5A36A3B1E31ECB5AC76675D79B4A5E200000B778A39A3663719DFC5E48C9D78431B1E45C2AF9DF538782BF199C189DABEAC70000000000 +# comment: all transactions diff --git a/catbuffer-generators/travis/symbol-sdk-java.gpg.enc b/catbuffer-generators/travis/symbol-sdk-java.gpg.enc new file mode 100644 index 00000000..d350f66b Binary files /dev/null and b/catbuffer-generators/travis/symbol-sdk-java.gpg.enc differ diff --git a/catbuffer-generators/travis/travis-functions.sh b/catbuffer-generators/travis/travis-functions.sh new file mode 100644 index 00000000..0276b7d6 --- /dev/null +++ b/catbuffer-generators/travis/travis-functions.sh @@ -0,0 +1,166 @@ +#!/usr/bin/env bash +set -e + +REMOTE_NAME="origin" +FUNCTIONS_VERSION="0.1.2" + +test_travis_functions () +{ + echo "Travis Functions Loaded" +} + +increment_version () +{ + declare -a part=( ${1//\./ } ) + declare new + declare -i carry=1 + + for (( CNTR=${#part[@]}-1; CNTR>=0; CNTR-=1 )); do + len=${#part[CNTR]} + new=$((part[CNTR]+carry)) + [ ${#new} -gt $len ] && carry=1 || carry=0 + [ $CNTR -gt 0 ] && part[CNTR]=${new: -len} || part[CNTR]=${new} + done + new="${part[*]}" + echo -e "${new// /.}" +} + +log_env_variables(){ + echo "DEV_BRANCH = $DEV_BRANCH" + echo 
"RELEASE_BRANCH = $RELEASE_BRANCH" + echo "POST_RELEASE_BRANCH = $POST_RELEASE_BRANCH" + echo "RELEASE_MESSAGE = $RELEASE_MESSAGE" + echo "REMOTE_NAME = $REMOTE_NAME" + echo "DOCKER_IMAGE_NAME = $DOCKER_IMAGE_NAME" + echo "TRAVIS_EVENT_TYPE = $TRAVIS_EVENT_TYPE" + echo "TRAVIS_COMMIT_MESSAGE = $TRAVIS_COMMIT_MESSAGE" + echo "TRAVIS_REPO_SLUG = $TRAVIS_REPO_SLUG" + echo "TRAVIS_BRANCH = $TRAVIS_BRANCH" + echo "TRAVIS_TAG = $TRAVIS_TAG" + echo "FUNCTIONS_VERSION = $FUNCTIONS_VERSION" +} + + +validate_env_variables(){ + log_env_variables + validate_env_variable "TRAVIS_EVENT_TYPE" "$FUNCNAME" + validate_env_variable "RELEASE_BRANCH" "$FUNCNAME" + validate_env_variable "POST_RELEASE_BRANCH" "$FUNCNAME" + validate_env_variable "DEV_BRANCH" "$FUNCNAME" + validate_env_variable "TRAVIS_COMMIT_MESSAGE" "$FUNCNAME" +} + +validate_env_variable () +{ + var="$1" + if [ "${!var}" = "" ] + then + echo "Env $var has not been provided for operation '$2'" + exit 128 + fi +} + +assert_value () +{ + value="$1" + expectedValue="$2" + if [ "$value" != "$expectedValue" ] + then + echo "'$value' is not the expected value '$expectedValue'" + exit 128 + fi +} + + + +checkout_branch () +{ + CHECKOUT_BRANCH="$1" + validate_env_variable "TRAVIS_REPO_SLUG" "$FUNCNAME" + validate_env_variable "CHECKOUT_BRANCH" "$FUNCNAME" + validate_env_variable "GITHUB_TOKEN" "$FUNCNAME" + validate_env_variable "REMOTE_NAME" "$FUNCNAME" + git remote rm $REMOTE_NAME + echo "Setting remote url https://github.com/${TRAVIS_REPO_SLUG}.git" + git remote add $REMOTE_NAME "https://${GITHUB_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git" >/dev/null 2>&1 + echo "Checking out $CHECKOUT_BRANCH as travis leaves the head detached." 
+ git checkout $CHECKOUT_BRANCH +} + +load_version_from_npm(){ + VERSION="$(npm run version --silent)" + echo -e "$VERSION" +} + +load_version_from_file(){ + VERSION="$(head -n 1 version.txt)" + echo -e "$VERSION" +} + +post_release_version_file(){ + + validate_env_variable "RELEASE_BRANCH" "$FUNCNAME" + validate_env_variable "REMOTE_NAME" "$FUNCNAME" + validate_env_variable "POST_RELEASE_BRANCH" "$FUNCNAME" + checkout_branch "${RELEASE_BRANCH}" + VERSION="$(load_version_from_file)" + + NEW_VERSION=$(increment_version "$VERSION") + + echo "Version: $VERSION" + echo "New Version: $NEW_VERSION" + + echo "Creating tag version v$VERSION" + git tag -fa "v$VERSION" -m "Releasing version $VERSION" + + echo "Creating new version $NEW_VERSION" + echo "$NEW_VERSION" > 'version.txt' + git add version.txt + git commit -m "Creating new version $NEW_VERSION" + + echo "Pushing code to $REMOTE_NAME $POST_RELEASE_BRANCH" + git push $REMOTE_NAME $RELEASE_BRANCH:$POST_RELEASE_BRANCH + echo "Pushing tags to $REMOTE_NAME" + git push --tags $REMOTE_NAME + +} + + +push_github_pages(){ + + VERSION="$1" + DOCS_PATH="$2" + PUBLICATION_BRANCH=gh-pages + REPO_PATH=$PWD + + validate_env_variable "VERSION" "$FUNCNAME" + validate_env_variable "PUBLICATION_BRANCH" "$FUNCNAME" + validate_env_variable "DOCS_PATH" "$FUNCNAME" + validate_env_variable "GITHUB_TOKEN" "$FUNCNAME" + validate_env_variable "TRAVIS_REPO_SLUG" "$FUNCNAME" + validate_env_variable "REPO_PATH" "$FUNCNAME" + + # Checkout the branch + rm -rf $HOME/publish + cd $HOME + git clone --branch=$PUBLICATION_BRANCH https://${GITHUB_TOKEN}@github.com/$TRAVIS_REPO_SLUG publish 2>&1 > /dev/null + cd publish + # Update pages + + cp -r $REPO_PATH/${DOCS_PATH}. ./ + # Commit and push latest version + git add . + git config user.name "Travis" + git config user.email "travis@travis-ci.org" + git diff-index --quiet HEAD || git commit -m "Uploading $VERSION docs." 
+ git push -fq origin $PUBLICATION_BRANCH 2>&1 > /dev/null + cd $REPO_PATH + +} + +if [ "$1" == "post_release_version_file" ];then + post_release_version_file +fi + + + diff --git a/catbuffer-generators/version.txt b/catbuffer-generators/version.txt new file mode 100644 index 00000000..b1e80bb2 --- /dev/null +++ b/catbuffer-generators/version.txt @@ -0,0 +1 @@ +0.1.3 diff --git a/catbuffer-parser/.gitignore b/catbuffer-parser/.gitignore new file mode 100644 index 00000000..d995890f --- /dev/null +++ b/catbuffer-parser/.gitignore @@ -0,0 +1,9 @@ +*~ +*.pch +*.pyc +__pycache__/ +.idea +.vscode/ +.DS_Store +_generated/ +.python-version diff --git a/catbuffer-parser/.pycodestyle b/catbuffer-parser/.pycodestyle new file mode 100644 index 00000000..caa456f8 --- /dev/null +++ b/catbuffer-parser/.pycodestyle @@ -0,0 +1,2 @@ +[pycodestyle] +max-line-length = 140 diff --git a/catbuffer-parser/.pylintrc b/catbuffer-parser/.pylintrc new file mode 100644 index 00000000..363ad178 --- /dev/null +++ b/catbuffer-parser/.pylintrc @@ -0,0 +1,426 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# Allow loading of arbitrary C extensions. 
Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +#disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys
-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call +disable=missing-docstring,misplaced-comparison-constant + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. 
+score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[BASIC] + +# Naming hint for argument names +argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct argument names +argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Naming hint for attribute names +attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct attribute names +attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Z][a-zA-Z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Z][a-zA-Z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(t_[A-Z0-9_]+)|(__.*__))$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(t_[A-Z0-9_]+)|(__.*__))$ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. 
+docstring-min-length=100 + +# Naming hint for function names +function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct function names +function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][a-z0-9_]*$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][a-z0-9_]*$ + +# Naming hint for method names +method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct method names +method-rgx=(([a-z][a-z0-9_]{2,30})|(test_[a-z0-9_]+)|(_[a-z0-9_]*))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9_]+))$ + +# Regular expression matching correct module names +module-rgx=(test_)?([A-Za-z][a-zA-Z0-9_]+)$ + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=. + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming hint for variable names +variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct variable names +variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. 
+indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=140 + +# Maximum number of lines in a module +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. 
+spelling-store-unknown-words=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. 
+missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-z0-9_]*[a-z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Maximum number of attributes for a class (see R0902). 
+max-attributes=8 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=0 + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/catbuffer-parser/.travis.yml b/catbuffer-parser/.travis.yml new file mode 100644 index 00000000..244ba05a --- /dev/null +++ b/catbuffer-parser/.travis.yml @@ -0,0 +1,27 @@ +language: python + +python: + - 3.6 + +cache: pip + +install: + - pip install -r requirements.txt + +jobs: + include: + - stage: lint + script: + - isort --check-only --line-length 140 main.py catparser test + - pycodestyle --config=.pycodestyle -- main.py catparser test + - pylint --load-plugins pylint_quotes -- main.py catparser test + + - stage: test + name: "unit tests" + script: + - python3 -m unittest discover -v + + - stage: test + name: "generate all" + script: + - bash ./scripts/generate_all.sh diff --git a/catbuffer-parser/HEADER.inc b/catbuffer-parser/HEADER.inc new file mode 100644 index 00000000..243a6f9a --- /dev/null +++ b/catbuffer-parser/HEADER.inc @@ -0,0 +1,20 @@ +/** +*** Copyright (c) 2016-2019, Jaguar0625, gimre, BloodyRookie, Tech Bureau, Corp. +*** Copyright (c) 2020-present, Jaguar0625, gimre, BloodyRookie. +*** +*** This file is part of Catapult. +*** +*** Catapult is free software: you can redistribute it and/or modify +*** it under the terms of the GNU Lesser General Public License as published by +*** the Free Software Foundation, either version 3 of the License, or +*** (at your option) any later version. +*** +*** Catapult is distributed in the hope that it will be useful, +*** but WITHOUT ANY WARRANTY; without even the implied warranty of +*** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +*** GNU Lesser General Public License for more details. +*** +*** You should have received a copy of the GNU Lesser General Public License +*** along with Catapult. If not, see . 
+**/ + diff --git a/catbuffer-parser/LICENSE b/catbuffer-parser/LICENSE new file mode 100644 index 00000000..ab602974 --- /dev/null +++ b/catbuffer-parser/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/catbuffer-parser/README.md b/catbuffer-parser/README.md new file mode 100644 index 00000000..06c2b02c --- /dev/null +++ b/catbuffer-parser/README.md @@ -0,0 +1,76 @@ +# catbuffer + +[![Build Status](https://api.travis-ci.com/nemtech/catbuffer.svg?branch=main)](https://travis-ci.com/nemtech/catbuffer) + +The catbuffer library defines the protocol to serialize and deserialize Symbol entities. Code generators from the [catbuffer-generators](https://github.com/nemtech/catbuffer-generators) project can then produce the leanest code necessary to serialize and deserialize those entities. + +Using catbuffer-generators, developers can generate builder classes for a given set of programming languages. 
For example, the [Symbol SDKs](https://nemtech.github.io/sdk) use the generated code to interact with the entities in binary form before announcing them to the network. + +The [schemas](schemas) folder contains definitions for each entity's data structure. These definitions are always kept up to date and in sync with the [catapult server](https://github.com/nemtech/catapult-server) code. + +## Requirements + +* Python >= 3.4 + +## Installation + +1. Clone the ``catbuffer`` repository: + +```bash +git clone https://github.com/nemtech/catbuffer +``` + +2. Install the package requirements: + +```bash +pip3 install -r requirements.txt +``` + +## Usage + +```bash +python3 main.py [OPTIONS] +``` + +| Option | Description | Default | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| -s, --schema TEXT | Input CATS file | | +| -o, --output TEXT | Output directory | _generated | +| -i, --include TEXT | Schema root directory | ./schemas | +| -g, --generator TEXT | Generator to use to produce output files (see the [available generators](https://github.com/nemtech/catbuffer-generators/blob/main/generators/All.py#L4)). | | +| -c, --copyright TEXT | File containing copyright data to use with output files. | ../HEADER.inc | + +## Examples + +In order to produce any output file, the [catbuffer-generators](https://github.com/nemtech/catbuffer-generators) project is needed. Please see this project's usage examples. + +However, ``catbuffer`` can still be used on its own to parse input files and check their validity: + +```bash +python3 main.py --schema schemas/transfer/transfer.cats +``` + +There is also a script in the ``scripts`` folder to parse and validate all schemas: + +```bash +scripts/generate_all.sh +``` + +> **NOTE:** +> These scripts require Bash 4 or higher. 
+ +### Run the linter + +```bash +pylint --load-plugins pylint_quotes main.py catparser test +pycodestyle --config=.pycodestyle . +``` + +### Run the tests + +```bash +python3 -m unittest discover -v +``` + +Copyright (c) 2016-2019, Jaguar0625, gimre, BloodyRookie, Tech Bureau, Corp. +Copyright (c) 2020-present, Jaguar0625, gimre, BloodyRookie. diff --git a/catbuffer-parser/catparser/AliasParser.py b/catbuffer-parser/catparser/AliasParser.py new file mode 100644 index 00000000..e03e6fff --- /dev/null +++ b/catbuffer-parser/catparser/AliasParser.py @@ -0,0 +1,23 @@ +from .parserutils import parse_builtin, require_user_type_name +from .RegexParserFactory import RegexParserFactory + + +class AliasParser: + """Parser for `using` statements""" + def __init__(self, regex): + self.regex = regex + + def process_line(self, line): + match = self.regex.match(line) + + # aliases are only supported for builtin types + return ( + require_user_type_name(match.group(1)), + parse_builtin(match.group(2)) + ) + + +class AliasParserFactory(RegexParserFactory): + """Factory for creating alias parsers""" + def __init__(self): + super().__init__(r'using (\S+) = (\S+)', AliasParser) diff --git a/catbuffer-parser/catparser/CatsParseException.py b/catbuffer-parser/catparser/CatsParseException.py new file mode 100644 index 00000000..e255e043 --- /dev/null +++ b/catbuffer-parser/catparser/CatsParseException.py @@ -0,0 +1,2 @@ +class CatsParseException(Exception): + """Exception raised when a parse error is encountered""" diff --git a/catbuffer-parser/catparser/CatsParser.py b/catbuffer-parser/catparser/CatsParser.py new file mode 100644 index 00000000..f3bd563e --- /dev/null +++ b/catbuffer-parser/catparser/CatsParser.py @@ -0,0 +1,143 @@ +from collections import OrderedDict + +from .AliasParser import AliasParserFactory +from .CatsParseException import CatsParseException +from .CommentParser import CommentParser +from .EnumParser import EnumParserFactory +from .ImportParser import 
class CommentParser:
    """Aspect parser that accumulates '#' comment lines until they are attached to a declaration."""

    def __init__(self):
        self.comments = []

    def try_process_line(self, line):
        """Consumes the line and returns True if and only if it is a comment line."""
        if not line.startswith('#'):
            return False

        self.comments.append(line[1:])
        return True

    def commit(self):
        """Joins and clears all captured comments, returning them as a partial descriptor."""
        joined = ' '.join(entry.strip() for entry in self.comments)
        self.comments.clear()
        return {'comments': joined}


class CatsParser(ScopeManager):
    """Parser used to parse CATS files line by line."""

    def __init__(self, import_resolver):
        super().__init__()
        self.import_resolver = import_resolver

        # comment handling is a cross-cutting aspect; these factories handle top-level statements
        self.aspect_parser = CommentParser()
        self.type_parser_factories = [
            AliasParserFactory(),
            EnumParserFactory(),
            ImportParserFactory(),
            StructParserFactory()
        ]

        self.wip_type_descriptors = OrderedDict()
        self.active_parser = None

    def process_line(self, line):
        """Processes the next line of input, wrapping any failure with current scope information."""
        try:
            self._process_line(line)
        except Exception as ex:
            raise CatsParseException('\n'.join(self.scope()), ex) from ex

    def commit(self):
        """Completes processing of the current type."""
        self._close_type()

    def _process_line(self, line):
        self.increment_line_number()

        # comments are consumed by the aspect parser until something attachable appears
        stripped = line.strip()
        if self.aspect_parser.try_process_line(stripped):
            return

        # anything else gets the accumulated comment state attached to it
        partial_descriptor = self.aspect_parser.commit()

        # ignore blank lines
        if not stripped:
            return

        # an unindented non-empty line terminates the type currently being built
        if self.active_parser and not line.startswith('\t'):
            self._close_type()

        if self.active_parser:
            active_factories = self.active_parser.factories()
        else:
            active_factories = self.type_parser_factories

        try:
            factory = next(candidate for candidate in active_factories if candidate.is_match(stripped))
        except StopIteration as ex:
            raise CatsParseException(f'none of the parsers matched the line "{stripped}"') from ex

        parser = factory.create()
        parse_result = parser.process_line(stripped)

        # a falsy result means the parser started a composite type and owns subsequent lines
        if not parse_result:
            self.active_parser = parser
            self.active_parser.partial_descriptor = partial_descriptor
            return

        if self.active_parser:
            if 'type' in parse_result:
                self._require_known_type(parse_result['type'])

            # perform extra validation on some property links for better error detection/messages;
            # sort key processing will only occur if the linked field type already exists
            if 'sort_key' in parse_result:
                self._require_type_with_field(parse_result['type'], parse_result['sort_key'])

            self.active_parser.append({**parse_result, **partial_descriptor})
        elif hasattr(parse_result, 'import_file'):
            self.import_resolver(parse_result.import_file)
        else:
            self._set_type_descriptor(parse_result[0], {**parse_result[1], **partial_descriptor})

    def _close_type(self):
        if not self.active_parser:
            return

        type_name, type_descriptor = self.active_parser.commit()

        if 'layout' in type_descriptor:
            for property_descriptor in type_descriptor['layout']:
                if 'condition' not in property_descriptor:
                    continue

                # by the time a condition is post-processed here, the linked condition field is known
                # to be part of the struct and the linked condition type already exists
                condition_field_name = property_descriptor['condition']
                condition_type_descriptor = next(
                    descriptor for descriptor in type_descriptor['layout']
                    if 'name' in descriptor and descriptor['name'] == condition_field_name
                )

                self._require_enum_type_with_value(condition_type_descriptor['type'], property_descriptor['condition_value'])

        self._set_type_descriptor(type_name, {**type_descriptor, **self.active_parser.partial_descriptor})
        self.active_parser = None

    def _require_known_type(self, type_name):
        # 'byte' is the built-in primitive; anything else must have been declared already
        if type_name not in self.wip_type_descriptors and 'byte' != type_name:
            raise CatsParseException(f'no definition for linked type "{type_name}"')

        return type_name

    def _require_type_with_field(self, type_name, field_name):
        type_descriptor = self.wip_type_descriptors[type_name]
        if not any(field_name == field['name'] for field in type_descriptor['layout']):
            raise CatsParseException(f'"{type_name}" does not have field "{field_name}"')

    def _require_enum_type_with_value(self, type_name, value_name):
        enum_type_descriptor = self.wip_type_descriptors[type_name]
        if 'values' not in enum_type_descriptor:
            raise CatsParseException(f'linked type "{type_name}" must be an enum type')

        if not any(value_name == value['name'] for value in enum_type_descriptor['values']):
            raise CatsParseException(f'linked enum type "{type_name}" does not contain value "{value_name}"')

    def _set_type_descriptor(self, type_name, type_descriptor):
        if type_name in self.wip_type_descriptors:
            raise CatsParseException(f'duplicate definition for type "{type_name}"')

        self.wip_type_descriptors[type_name] = type_descriptor

    def type_descriptors(self):
        """Returns all parsed type descriptors, committing any in-progress type first."""
        self._close_type()
        return self.wip_type_descriptors
+1,15 @@ +class CompositeTypeParser: + """Base for composite type parsers""" + def __init__(self, regex, factories): + self.regex = regex + self.sub_factories = factories + self.type_name = None + self.type_descriptor = None + + def factories(self): + """Gets sub-parsers for this composite type parser""" + return self.sub_factories + + def commit(self): + """Returns the composite type tuple""" + return (self.type_name, self.type_descriptor) diff --git a/catbuffer-parser/catparser/EnumParser.py b/catbuffer-parser/catparser/EnumParser.py new file mode 100644 index 00000000..96eb4a3f --- /dev/null +++ b/catbuffer-parser/catparser/EnumParser.py @@ -0,0 +1,54 @@ +from .CatsParseException import CatsParseException +from .CompositeTypeParser import CompositeTypeParser +from .parserutils import parse_builtin, parse_dec_or_hex, require_primitive, require_property_name, require_user_type_name +from .RegexParserFactory import RegexParserFactory + + +class EnumParser(CompositeTypeParser): + """Parser for `enum` statements""" + def __init__(self, regex): + super().__init__(regex, [EnumValueParserFactory()]) + + def process_line(self, line): + match = self.regex.match(line) + self.type_name = require_user_type_name(match.group(1)) + + base_type = require_primitive(match.group(2)) + builtin_type_descriptor = parse_builtin(base_type) + self.type_descriptor = { + 'type': 'enum', + 'size': builtin_type_descriptor['size'], + 'signedness': builtin_type_descriptor['signedness'], + 'values': [] + } + + def append(self, property_value_descriptor): + self._require_unknown_property(property_value_descriptor['name']) + + self.type_descriptor['values'].append(property_value_descriptor) + + def _require_unknown_property(self, property_name): + if any(property_name == property_type_descriptor['name'] for property_type_descriptor in self.type_descriptor['values']): + raise CatsParseException('duplicate definition for enum value "{0}"'.format(property_name)) + + +class 
EnumParserFactory(RegexParserFactory): + """Factory for creating enum parsers""" + def __init__(self): + super().__init__(r'enum (\S+) : (u?int\d+)', EnumParser) + + +class EnumValueParser: + """Parser for enum values""" + def __init__(self, regex): + self.regex = regex + + def process_line(self, line): + match = self.regex.match(line) + return {'name': require_property_name(match.group(1)), 'value': parse_dec_or_hex(match.group(2))} + + +class EnumValueParserFactory(RegexParserFactory): + """Factory for creating enum value parsers""" + def __init__(self): + super().__init__(r'(\S+) = (\S+)', EnumValueParser) diff --git a/catbuffer-parser/catparser/ImportParser.py b/catbuffer-parser/catparser/ImportParser.py new file mode 100644 index 00000000..c7593413 --- /dev/null +++ b/catbuffer-parser/catparser/ImportParser.py @@ -0,0 +1,26 @@ +from .RegexParserFactory import RegexParserFactory + + +class ImportResult: + """Information about an import statement""" + def __init__(self, import_file): + self.import_file = import_file + + def __eq__(self, rhs): + return isinstance(rhs, ImportResult) and self.import_file == rhs.import_file + + +class ImportParser: + """Parser for `import` statements""" + def __init__(self, regex): + self.regex = regex + + def process_line(self, line): + match = self.regex.match(line) + return ImportResult(match.group(1)) + + +class ImportParserFactory(RegexParserFactory): + """Factory for creating import parsers""" + def __init__(self): + super().__init__(r'import "([\S ]+)"', ImportParser) diff --git a/catbuffer-parser/catparser/RegexParserFactory.py b/catbuffer-parser/catparser/RegexParserFactory.py new file mode 100644 index 00000000..a550e148 --- /dev/null +++ b/catbuffer-parser/catparser/RegexParserFactory.py @@ -0,0 +1,16 @@ +import re + + +class RegexParserFactory: + """Base for top-level parser factories""" + def __init__(self, regex, parser_type): + self.regex = re.compile('^{0}$'.format(regex)) + self.parser_type = parser_type + + def 
is_match(self, line): + """Returns True if the line is a match for this factory's parser""" + return self.regex.match(line) + + def create(self): + """Creates a new parser""" + return self.parser_type(self.regex) diff --git a/catbuffer-parser/catparser/ScopeManager.py b/catbuffer-parser/catparser/ScopeManager.py new file mode 100644 index 00000000..d4145900 --- /dev/null +++ b/catbuffer-parser/catparser/ScopeManager.py @@ -0,0 +1,33 @@ +from .CatsParseException import CatsParseException + + +class Scope: + """Tuple composed of filename and line number""" + def __init__(self, name): + self.name = name + self.line_number = 0 + + +class ScopeManager: + """Manages the current scope composed of filename and line number""" + def __init__(self): + self.scopes = [Scope('')] + + def push_scope(self, name): + """Pushes the input scope""" + self.scopes.append(Scope(name)) + + def scope(self): + """Gets the current location""" + return ['{0}:{1}'.format(scope.name, scope.line_number) for scope in self.scopes][::-1] + + def pop_scope(self): + """Pops the input scope""" + if 1 == len(self.scopes): + raise CatsParseException('CatsParser cannot pop default scope') + + self.scopes.pop() + + def increment_line_number(self): + """Increments the line number for the current scope""" + self.scopes[-1].line_number += 1 diff --git a/catbuffer-parser/catparser/StructParser.py b/catbuffer-parser/catparser/StructParser.py new file mode 100644 index 00000000..b5bc7d00 --- /dev/null +++ b/catbuffer-parser/catparser/StructParser.py @@ -0,0 +1,208 @@ +from .CatsParseException import CatsParseException +from .CompositeTypeParser import CompositeTypeParser +from .parserutils import (is_builtin, is_dec_or_hex, is_primitive, parse_builtin, parse_dec_or_hex, require_property_name, + require_user_type_name) +from .RegexParserFactory import RegexParserFactory + +# region StructParser(Factory) + + +class StructParser(CompositeTypeParser): + """Parser for `struct` statements""" + def __init__(self, 
class StructParser(CompositeTypeParser):
    """Parser for `struct` statements."""

    def __init__(self, regex):
        super().__init__(regex, [
            StructConstParserFactory(),
            StructInlineParserFactory(),
            StructScalarMemberParserFactory(),
            StructArrayMemberParserFactory()
        ])

    def process_line(self, line):
        """Parses the struct header line and initializes an empty layout."""
        match = self.regex.match(line)
        self.type_name = require_user_type_name(match.group(1))
        self.type_descriptor = {'type': 'struct', 'layout': []}

    def append(self, property_type_descriptor):
        """Validates and appends a parsed property descriptor to the struct layout."""
        self._require_no_array_with_fill_disposition()

        # a size reference must point at an already-declared property (or be numeric)
        if 'size' in property_type_descriptor:
            self._require_known_property(property_type_descriptor['size'])

        descriptor_uid = self._get_descriptor_uid(property_type_descriptor)
        if descriptor_uid[0]:
            self._require_unknown_property(descriptor_uid)

        self.type_descriptor['layout'].append(property_type_descriptor)

    def commit(self):
        """Validates condition references before returning the (name, descriptor) tuple."""
        # conditions are resolved at commit time so they may reference later properties
        for property_type_descriptor in self.type_descriptor['layout']:
            if 'condition' in property_type_descriptor:
                self._require_known_property(property_type_descriptor['condition'], False)

        return super().commit()

    def _require_no_array_with_fill_disposition(self):
        layout = self.type_descriptor['layout']
        if not layout:
            return

        last_property = layout[-1]
        if 'fill' == last_property.get('disposition'):
            raise CatsParseException(f'array property with fill disposition "{last_property["name"]}" must be last property')

    def _require_known_property(self, property_name, allow_numeric=True):
        # size can be a constant represented by a numeric type
        if allow_numeric and not isinstance(property_name, str):
            return

        if all('name' not in property_type_descriptor or property_name != property_type_descriptor['name']
               for property_type_descriptor in self.type_descriptor['layout']):
            raise CatsParseException(f'no definition for referenced property "{property_name}"')

    def _require_unknown_property(self, descriptor_uid):
        if any(descriptor_uid == self._get_descriptor_uid(property_type_descriptor)
               for property_type_descriptor in self.type_descriptor['layout']):
            raise CatsParseException(f'duplicate definition for property "{descriptor_uid}"')

    @staticmethod
    def _get_descriptor_uid(descriptor):
        # (name, disposition) uniquely identifies a property within a struct
        return (descriptor.get('name'), descriptor.get('disposition'))


class StructParserFactory(RegexParserFactory):
    """Factory for creating struct parsers."""

    def __init__(self):
        super().__init__(r'struct (\S+)', StructParser)


class StructConstParser:
    """Parser for const struct members."""

    def __init__(self, regex):
        self.regex = regex

    def process_line(self, line):
        """Parses a const member, folding primitive types into byte descriptors."""
        match = self.regex.match(line)
        type_name = match.group(1)

        const_descriptor = {
            'name': require_property_name(match.group(2)),
            'disposition': 'const',
            'value': parse_dec_or_hex(match.group(3))
        }

        # primitives contribute their size/signedness; user types are linked by name
        if is_primitive(type_name):
            const_descriptor = {**const_descriptor, **parse_builtin(type_name)}
        else:
            const_descriptor['type'] = require_user_type_name(type_name)

        return const_descriptor


class StructConstParserFactory(RegexParserFactory):
    """Factory for creating struct const parsers."""

    def __init__(self):
        super().__init__(r'const (\S+) (\S+) = (\S+)', StructConstParser)


class StructInlineParser:
    """Parser for inline struct members."""

    def __init__(self, regex):
        self.regex = regex

    def process_line(self, line):
        """Parses an inline member; the type is resolved to exist upstream."""
        match = self.regex.match(line)
        return {'type': match.group(1), 'disposition': 'inline'}


class StructInlineParserFactory(RegexParserFactory):
    """Factory for creating struct inline parsers."""

    def __init__(self):
        super().__init__(r'inline (\S+)', StructInlineParser)
= regex + + def process_line(self, line): + match = self.regex.match(line) + linked_type_name = match.group(2) + + # type is resolved to exist upstream, so its naming doesn't need to be checked here + if is_builtin(linked_type_name): + property_type_descriptor = parse_builtin(linked_type_name) # reduce builtins to byte + else: + property_type_descriptor = {'type': linked_type_name} + + if match.group(3): + property_type_descriptor['condition'] = match.group(4) + property_type_descriptor['condition_operation'] = match.group(5) + property_type_descriptor['condition_value'] = match.group(6) + + property_type_descriptor['name'] = require_property_name(match.group(1)) + return property_type_descriptor + + +class StructScalarMemberParserFactory(RegexParserFactory): + """Factory for creating struct scalar member parsers""" + def __init__(self): + super().__init__(r'(\S+) = (\S+)( if (\S+) (equals|has) (\S+))?', StructScalarMemberParser) + +# endregion + +# region StructArrayMemberParser(Factory) + + +class StructArrayMemberParser: + """Parser for non-inline array struct members""" + def __init__(self, regex): + self.regex = regex + + def process_line(self, line): + match = self.regex.match(line) + + # type is resolved to exist upstream, so its naming doesn't need to be checked here + property_type_descriptor = {'type': match.group(2)} + + # size can be interpreted in different ways for count-based arrays + # size must be a field reference for size-based arrays + array_size = match.group(4) + if not match.group(3): + if is_dec_or_hex(array_size): + array_size = parse_dec_or_hex(array_size) + + if '__FILL__' == array_size: + array_size = 0 + property_type_descriptor['disposition'] = 'fill' + else: + property_type_descriptor['disposition'] = 'var' + + property_type_descriptor['size'] = array_size + + if match.group(5): + property_type_descriptor['sort_key'] = match.group(6) + + property_type_descriptor['name'] = require_property_name(match.group(1)) + return 
import re

# NOTE: CatsParseException is imported at the top of this module and is only
# referenced on failure paths below.
REGEXES = {
    'user_type_name': re.compile(r'^[A-Z][a-zA-Z0-9]*$'),
    'property_name': re.compile(r'^[a-z][a-zA-Z0-9_]*$'),

    'int_or_uint': re.compile(r'^(u)?int(8|16|32|64)$'),
    'binary_fixed_type': re.compile(r'^binary_fixed\((0x[0-9A-F]+|[0-9]+)\)$'),
    'dec_or_hex': re.compile(r'^(0x[0-9A-F]+|[0-9]+)$'),
}


def _match_regex_or_throw(regex_key, line):
    """Matches the line against the named regex, raising CatsParseException on failure."""
    match = REGEXES[regex_key].match(line)
    if not match:
        raise CatsParseException(f'unable to parse "{regex_key}": {line}')

    return match


def require_user_type_name(type_name):
    """Raises an exception if the specified name is not a valid user type name."""
    _match_regex_or_throw('user_type_name', type_name)
    return type_name


def require_property_name(type_name):
    """Raises an exception if the specified name is not a valid property name."""
    _match_regex_or_throw('property_name', type_name)
    return type_name


def is_primitive(type_name):
    """Returns a truthy match when the specified name is a valid primitive name."""
    return REGEXES['int_or_uint'].match(type_name)


def require_primitive(type_name):
    """Raises an exception if the specified name is not a valid primitive name."""
    _match_regex_or_throw('int_or_uint', type_name)
    return type_name


def is_dec_or_hex(string):
    """Returns a truthy match when the string is a valid decimal or hexadecimal number."""
    return REGEXES['dec_or_hex'].match(string)


def parse_dec_or_hex(string):
    """Parses a string as either a decimal or hexadecimal number."""
    return int(string, 16 if string.startswith('0x') else 10)


def is_builtin(type_name):
    """Returns a truthy match when the name is a builtin ((u)int alias or binary_fixed) type."""
    return REGEXES['int_or_uint'].match(type_name) or REGEXES['binary_fixed_type'].match(type_name)


def parse_builtin(type_name):
    """Parses a builtin type, either binary_fixed or a (u)int alias, into a byte descriptor."""
    is_unsigned = True
    binary_fixed_type_match = REGEXES['binary_fixed_type'].match(type_name)
    if binary_fixed_type_match:
        # binary_fixed(N) is an opaque unsigned blob of N bytes
        type_descriptor = {'size': parse_dec_or_hex(binary_fixed_type_match.group(1))}
    else:
        match = _match_regex_or_throw('int_or_uint', type_name)
        is_unsigned = bool(match.group(1))
        type_descriptor = {'size': int(match.group(2)) // 8}

    return {**type_descriptor, 'type': 'byte', 'signedness': 'unsigned' if is_unsigned else 'signed'}
import argparse
import os
import pprint

from catparser.CatsParser import CatsParser

try:
    from generators.All import AVAILABLE_GENERATORS
except ImportError:
    # generators are optional; parsing and printing schemas still works without them
    AVAILABLE_GENERATORS = {}


class MultiFileParser:
    """CATS parser that resolves imports in global namespace."""

    def __init__(self):
        self.cats_parser = CatsParser(self._process_import_file)
        self.dirname = None
        self.imported_filenames = []

    def set_include_path(self, include_path):
        """Sets the directory against which import statements are resolved."""
        self.dirname = include_path

    def parse(self, schema_filename):
        """Parses a schema file and, recursively, everything it imports."""
        self._process_file(schema_filename)

    def _process_import_file(self, filename):
        # each file is processed at most once, even when imported from multiple places
        if filename in self.imported_filenames:
            return

        self.imported_filenames.append(filename)
        self._process_file(os.path.join(self.dirname, filename))

    def _process_file(self, filename):
        self.cats_parser.push_scope(filename)

        # fix: explicit encoding so schema parsing does not depend on the platform locale
        with open(filename, encoding='utf8') as input_file:
            for line in input_file:
                self.cats_parser.process_line(line)

        self.cats_parser.commit()
        self.cats_parser.pop_scope()


def _generate_output(generator_name, output_path, schema, options):
    """Runs the named generator over the schema and writes each produced file."""
    generator_class = AVAILABLE_GENERATORS[generator_name]
    os.makedirs(output_path, exist_ok=True)

    generator = generator_class(schema, options)
    for generated_descriptor in generator:
        output_filename = os.path.join(output_path, generated_descriptor.filename)
        # newline='\n' keeps generated files LF-only on all platforms; encoding pinned for determinism
        with open(output_filename, 'w', newline='\n', encoding='utf8') as output_file:
            for line in generated_descriptor.code:
                output_file.write(f'{line}\n')


def main():
    """Parses command line arguments, parses the schema and optionally generates code."""
    parser = argparse.ArgumentParser(description='CATS code generator')
    parser.add_argument('-s', '--schema', help='input CATS file', required=True)
    parser.add_argument('-o', '--output', help='output directory, if not provided, _generated/{generator} is used')
    parser.add_argument('-i', '--include', help='schema root directory', default='./schemas')

    generators_list = list(AVAILABLE_GENERATORS.keys())
    parser.add_argument('-g', '--generator', help='generator to use to produce output files', choices=generators_list)
    parser.add_argument('-c', '--copyright', help='file containing copyright data to use with output files', default='../HEADER.inc')
    args = parser.parse_args()

    file_parser = MultiFileParser()
    file_parser.set_include_path(args.include)
    file_parser.parse(args.schema)

    # echo the parsed schema to the console
    printer = pprint.PrettyPrinter(width=140)
    printer.pprint('*** *** ***')
    type_descriptors = file_parser.cats_parser.type_descriptors()
    for key in type_descriptors:
        printer.pprint((key, type_descriptors[key]))

    # generate and output code
    if args.generator:
        output_path = args.output
        if output_path is None:
            output_path = os.path.join('_generated', args.generator)

        _generate_output(args.generator, output_path, type_descriptors, {'copyright': args.copyright})


if '__main__' == __name__:
    main()
diff --git a/catbuffer-parser/scripts/generate_batch.sh b/catbuffer-parser/scripts/generate_batch.sh new file mode 100644 index 00000000..561169f4 --- /dev/null +++ b/catbuffer-parser/scripts/generate_batch.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +function generate_batch { + local start_success_color="\033[1;34m" + local start_error_color="\033[1;31m" + local end_color="\033[0m" + + local -n inputs=$1 + local folder="$2" + local counter=0 + for input in "${inputs[@]}"; + do + echo "generating ${input}" + python_args=( + "${folder}/main.py" + --schema "${folder}/schemas/${input}.cats" + --include "${folder}/schemas") + if [ "$#" -ge 3 ]; then + python_args+=( + --output "${folder}/_generated/$3" + --generator "$3" + --copyright "${folder}/HEADER.inc") + fi + + if ! python3 "${python_args[@]}"; then + echo -e "${start_error_color}ERROR: failed generating ${input}${end_color}" + exit 1 + fi + + ((++counter)) + done + + echo -e "${start_success_color}SUCCESS: generation complete with no errors [${counter} files processed]${end_color}" +} diff --git a/catbuffer-parser/scripts/schema_lists.sh b/catbuffer-parser/scripts/schema_lists.sh new file mode 100644 index 00000000..699f9c60 --- /dev/null +++ b/catbuffer-parser/scripts/schema_lists.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +export block_inputs=( + "block") + +export finalization_inputs=( + "finalization/finalization_round" + "finalization/finalized_block_header") + +export receipt_inputs=( + "receipts" + "namespace/namespace_receipts" + "resolution_statement/resolution_statements") + +export state_inputs=( + "state/account_state" + "state/hash_lock" + "state/lock_info" + "state/metadata_entry" + "state/mosaic_entry" + "state/multisig_entry" + "state/namespace_history" + "state/restriction_account" + "state/restriction_mosaic_entry" + "state/secret_lock") + +export transaction_inputs=( + "account_link/account_key_link" + "account_link/node_key_link" + "aggregate/aggregate" + "coresystem/voting_key_link" + 
class SingleLineParserTestUtils:
    """Test helpers for parsers whose statements fit on a single line."""

    def __init__(self, parser_factory_type, unittest):
        self.parser_factory_type = parser_factory_type
        self.unittest = unittest

    def assert_parse(self, line, expected_result):
        # Arrange:
        parser = self.parser_factory_type().create()

        # Act:
        result = parser.process_line(line)

        # Assert:
        self.unittest.assertEqual(expected_result, result)

    def assert_parse_exception(self, line, exception_type=CatsParseException):
        # Arrange:
        parser = self.parser_factory_type().create()

        # Sanity: the line must at least match the factory pattern
        self.unittest.assertTrue(self.parser_factory_type().is_match(line))

        # Act + Assert:
        with self.unittest.assertRaises(exception_type):
            parser.process_line(line)

    def assert_parse_exceptions(self, invalid_lines):
        # Act + Assert: every line must fail to parse
        for line in invalid_lines:
            self.assert_parse_exception(line)

    def assert_naming(self, pattern, valid_names, invalid_names):
        # Act + Assert: valid names parse without raising
        for name in valid_names:
            self.parser_factory_type().create().process_line(pattern.format(name))

        # Act + Assert: invalid names raise
        for name in invalid_names:
            self.assert_parse_exception(pattern.format(name))


class MultiLineParserTestUtils(SingleLineParserTestUtils):
    """Test helpers for parsers that require an explicit commit after processing."""

    def assert_parse(self, line, expected_result):
        # Arrange:
        parser = self.parser_factory_type().create()

        # Act:
        parser.process_line(line)
        result = parser.commit()

        # Assert:
        self.unittest.assertEqual(expected_result, result)


class ParserFactoryTestUtils:
    """Test helpers for checking which lines a parser factory accepts."""

    def __init__(self, parser_factory_type, unittest):
        self.parser_factory_type = parser_factory_type
        self.unittest = unittest

    def assert_positives(self, matches):
        # Arrange:
        factory = self.parser_factory_type()

        # Act + Assert:
        for line in matches:
            self.unittest.assertTrue(factory.is_match(line))

    def assert_negatives(self, matches):
        # Arrange:
        factory = self.parser_factory_type()

        # Act + Assert:
        for line in matches:
            self.unittest.assertFalse(factory.is_match(line))
('int64', 8, 'signed')] +UINT_TYPE_TUPLES = [('uint8', 1, 'unsigned'), ('uint16', 2, 'unsigned'), ('uint32', 4, 'unsigned'), ('uint64', 8, 'unsigned')] +PRIMITIVE_TYPE_TUPLES = INT_TYPE_TUPLES + UINT_TYPE_TUPLES +BUILTIN_TYPE_TUPLES = PRIMITIVE_TYPE_TUPLES + [ + ('binary_fixed(32)', 32, 'unsigned'), ('binary_fixed(0x20)', 32, 'unsigned'), ('binary_fixed(25)', 25, 'unsigned') +] diff --git a/catbuffer-parser/test/test_AliasParser.py b/catbuffer-parser/test/test_AliasParser.py new file mode 100644 index 00000000..f2ffa7b6 --- /dev/null +++ b/catbuffer-parser/test/test_AliasParser.py @@ -0,0 +1,44 @@ +import unittest +from test.constants import BUILTIN_TYPE_TUPLES, INVALID_USER_TYPE_NAMES, VALID_USER_TYPE_NAMES +from test.ParserTestUtils import ParserFactoryTestUtils, SingleLineParserTestUtils + +from catparser.AliasParser import AliasParserFactory + + +class AliasParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(AliasParserFactory, self).assert_positives([ + 'using A = foo', 'using ^ = $$$', 'using A90zaZa = te$t' + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(AliasParserFactory, self).assert_negatives([ + ' using A = foo', 'using A = foo ', 'import A = foo', 'using A = foo bar', 'using A B = foo bar' + ]) + + +class AliasParserTest(unittest.TestCase): + def test_can_parse_builtin_as_alias(self): + for builtin_tuple in BUILTIN_TYPE_TUPLES: + # Act + Assert: + SingleLineParserTestUtils(AliasParserFactory, self).assert_parse( + 'using Age = {0}'.format(builtin_tuple[0]), + ('Age', {'type': 'byte', 'signedness': builtin_tuple[2], 'size': builtin_tuple[1]})) + + def test_alias_names_must_have_type_name_semantics(self): + # Assert: + SingleLineParserTestUtils(AliasParserFactory, self).assert_naming( + 'using {0} = uint32', + VALID_USER_TYPE_NAMES, + INVALID_USER_TYPE_NAMES) + + def test_cannot_parse_invalid_alias(self): + # Arrange: + 
SingleLineParserTestUtils(AliasParserFactory, self).assert_parse_exceptions([ + 'using Hash256 = binary_fixed(2x22)', # malformed number + 'using Hash256 = binary_fixed(x)', # malformed number + 'using Age = uint33', # unknown type + 'using Age = FooBar' # user type + ]) diff --git a/catbuffer-parser/test/test_CatsParser.py b/catbuffer-parser/test/test_CatsParser.py new file mode 100644 index 00000000..b7276126 --- /dev/null +++ b/catbuffer-parser/test/test_CatsParser.py @@ -0,0 +1,645 @@ +import unittest + +from catparser.CatsParser import CatsParseException, CatsParser + + +def parse_all(lines, imports=None): + # Act: + if imports is None: + imports = [] + + parser = CatsParser(imports.append) + for line in lines: + parser.process_line(line) + + return parser.type_descriptors() + + +def uint_descriptor(size, explicit_type=True): + descriptor = {'signedness': 'unsigned', 'size': size} + if explicit_type: + descriptor['type'] = 'byte' + + return descriptor + + +def int_descriptor(size, explicit_type=True): + descriptor = uint_descriptor(size, explicit_type) + descriptor['signedness'] = 'signed' + return descriptor + + +class CatsParserTests(unittest.TestCase): + # pylint: disable=too-many-public-methods + + # region utils + + def _assert_parse_delayed_exception(self, lines): + # Arrange: + imports = [] + parser = CatsParser(imports.append) + for line in lines[:-1]: + parser.process_line(line) + + # Act + Assert: + with self.assertRaises(CatsParseException): + parser.process_line(lines[-1]) + + def _assert_parse_commit_exception(self, lines): + # Arrange: + imports = [] + parser = CatsParser(imports.append) + for line in lines: + parser.process_line(line) + + # Act + Assert: + with self.assertRaises(CatsParseException): + parser.commit() + + # endregion + + # region empty + basic + + def test_no_types_are_exposed_initially(self): + # Act: + parser = CatsParser(None) + + # Assert: + self.assertEqual(0, len(parser.type_descriptors())) + + def 
test_no_types_are_extracted_from_blank_lines(self): + # Act: + type_descriptors = parse_all([ + '', + '', + '' + ]) + + # Assert: + self.assertEqual(0, len(type_descriptors)) + + def test_parse_is_aborted_on_unknown_line(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'using Foo = uint8', + 'alias Bar = uint8' + ]) + + # endregion + + # region comments + + def test_unattached_comments_are_ignored(self): + # Act: + type_descriptors = parse_all([ + '# comment alias', + '# another comment', + '', # should clear previous comments + 'using MosaicId = uint64' + ]) + + # Assert: + self.assertEqual(1, len(type_descriptors)) + self.assertEqual(type_descriptors['MosaicId'], {**uint_descriptor(8), 'comments': ''}) + + def test_previously_attached_comments_are_ignored(self): + # Act: + type_descriptors = parse_all([ + '# comment one', + 'using Age = uint64', + '# comment two', + 'using Year = uint16' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Age'], {**uint_descriptor(8), 'comments': 'comment one'}) + self.assertEqual(type_descriptors['Year'], {**uint_descriptor(2), 'comments': 'comment two'}) + + # endregion + + # region imports + + def test_can_parse_valid_import(self): + # Act: + imports = [] + type_descriptors = parse_all(['import "foo.cats"'], imports) + + # Assert: + self.assertEqual(0, len(type_descriptors)) + self.assertEqual(['foo.cats'], imports) + + # endregion + + # region alias + + def test_can_parse_alias(self): + # Act: + type_descriptors = parse_all([ + 'using MosaicId = uint64', + '# unique account identifier', + 'using Address = binary_fixed(25)' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['MosaicId'], {**uint_descriptor(8), 'comments': ''}) + self.assertEqual(type_descriptors['Address'], {**uint_descriptor(25), 'comments': 'unique account identifier'}) + + # endregion + + # region struct + + def 
test_can_parse_struct_builtin_types(self): + # Act: + type_descriptors = parse_all([ + '# binary layout for a pair', + 'struct Pair', + '\t# some field comment', + '\tfooBar = uint64', + '\tbaz = binary_fixed(25)' + ]) + + # Assert: + self.assertEqual(1, len(type_descriptors)) + self.assertEqual(type_descriptors['Pair'], {'type': 'struct', 'comments': 'binary layout for a pair', 'layout': [ + {'name': 'fooBar', **uint_descriptor(8), 'comments': 'some field comment'}, + {'name': 'baz', **uint_descriptor(25), 'comments': ''} + ]}) + + def test_can_parse_struct_custom_types(self): + # Act: + type_descriptors = parse_all([ + 'using MosaicId = uint16', + 'using Amount = uint16', + '# binary layout for a mosaic', + 'struct Mosaic', + '\t# mosaic identifier', + '\tmosaicId = MosaicId', + '\tamount = Amount' + ]) + + # Assert: + self.assertEqual(3, len(type_descriptors)) + self.assertEqual(type_descriptors['Mosaic'], {'type': 'struct', 'comments': 'binary layout for a mosaic', 'layout': [ + {'name': 'mosaicId', 'type': 'MosaicId', 'comments': 'mosaic identifier'}, + {'name': 'amount', 'type': 'Amount', 'comments': ''} + ]}) + + def test_can_parse_struct_conditional_types(self): + # Act: + type_descriptors = parse_all([ + 'enum Shape : uint8', + '\tcircle = 4', + '\trectangle = 9', + 'using Circ = uint16', + 'using Perm = uint16', + 'struct Enclosing', + '\tdiscriminator = Shape', + '\t# u part 1', + '\tcircumference = Circ if discriminator has circle', + '\t# union pt 2', + '\tperimiter = Perm if discriminator equals rectangle' + ]) + + # Assert: + self.assertEqual(4, len(type_descriptors)) + self.assertEqual(type_descriptors['Enclosing'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'discriminator', 'type': 'Shape', 'comments': ''}, + { + 'name': 'circumference', 'type': 'Circ', + 'condition': 'discriminator', 'condition_value': 'circle', 'condition_operation': 'has', + 'comments': 'u part 1' + }, + { + 'name': 'perimiter', 'type': 'Perm', + 'condition': 
'discriminator', 'condition_value': 'rectangle', 'condition_operation': 'equals', + 'comments': 'union pt 2' + } + ]}) + + def test_can_parse_struct_conditional_types_with_inline_member(self): + # Act: + type_descriptors = parse_all([ + 'enum Shape : uint8', + '\tcircle = 4', + '\trectangle = 9', + 'using Circ = uint16', + 'using Perm = uint16', + 'struct Version', + '\tversion = uint16', + 'struct Enclosing', + '\tinline Version', + '\tdiscriminator = Shape', + '\t# u part 1', + '\tcircumference = Circ if discriminator has circle', + '\t# union pt 2', + '\tperimiter = Perm if discriminator equals rectangle' + ]) + + # Assert: + self.assertEqual(5, len(type_descriptors)) + self.assertEqual(type_descriptors['Enclosing'], {'type': 'struct', 'comments': '', 'layout': [ + {'type': 'Version', 'disposition': 'inline', 'comments': ''}, + {'name': 'discriminator', 'type': 'Shape', 'comments': ''}, + { + 'name': 'circumference', 'type': 'Circ', + 'condition': 'discriminator', 'condition_value': 'circle', 'condition_operation': 'has', + 'comments': 'u part 1' + }, + { + 'name': 'perimiter', 'type': 'Perm', + 'condition': 'discriminator', 'condition_value': 'rectangle', 'condition_operation': 'equals', + 'comments': 'union pt 2' + } + ]}) + + def test_can_parse_struct_conditional_types_trailing_discriminator(self): + # Act: + type_descriptors = parse_all([ + 'enum Shape : uint8', + '\tcircle = 4', + '\trectangle = 9', + 'using Circ = uint16', + 'using Perm = uint16', + 'struct Enclosing', + '\t# u part 1', + '\tcircumference = Circ if discriminator has circle', + '\t# union pt 2', + '\tperimiter = Perm if discriminator equals rectangle', + '\tdiscriminator = Shape' + ]) + + # Assert: + self.assertEqual(4, len(type_descriptors)) + self.assertEqual(type_descriptors['Enclosing'], {'type': 'struct', 'comments': '', 'layout': [ + { + 'name': 'circumference', 'type': 'Circ', + 'condition': 'discriminator', 'condition_value': 'circle', 'condition_operation': 'has', + 'comments': 'u 
part 1' + }, + { + 'name': 'perimiter', 'type': 'Perm', + 'condition': 'discriminator', 'condition_value': 'rectangle', 'condition_operation': 'equals', + 'comments': 'union pt 2' + }, + {'name': 'discriminator', 'type': 'Shape', 'comments': ''} + ]}) + + def test_can_parse_struct_array_types(self): + # Act: + type_descriptors = parse_all([ + 'using Truck = uint16', + 'using Car = uint16', + 'struct Fleet', + '\tcarCount = uint8', + '# all trucks in the fleet', + '\ttrucks = array(Truck, 10)', + '\tcars = array(Car, carCount)' + ]) + + # Assert: + self.assertEqual(3, len(type_descriptors)) + self.assertEqual(type_descriptors['Fleet'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'carCount', **uint_descriptor(1), 'comments': ''}, + {'name': 'trucks', 'type': 'Truck', 'size': 10, 'comments': 'all trucks in the fleet'}, + {'name': 'cars', 'type': 'Car', 'size': 'carCount', 'comments': ''} + ]}) + + def test_can_parse_struct_sorted_array_types(self): + # Act: + type_descriptors = parse_all([ + 'struct Face', + '\teyeColor = uint8', + 'struct Tracking', + '\tfaces = array(Face, 10, sort_key=eyeColor)' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Tracking'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'faces', 'type': 'Face', 'size': 10, 'sort_key': 'eyeColor', 'comments': ''} + ]}) + + def test_can_parse_struct_vararray_types(self): + # Act: + type_descriptors = parse_all([ + 'using Car = uint16', + 'struct Fleet', + '\tcarsSize = uint8', + '# all cars in the fleet', + '\tcars = array(Car, size=carsSize)' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Fleet'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'carsSize', **uint_descriptor(1), 'comments': ''}, + {'name': 'cars', 'type': 'Car', 'size': 'carsSize', 'disposition': 'var', 'comments': 'all cars in the fleet'} + ]}) + + def 
test_can_parse_struct_array_fill_types(self): + # Act: + type_descriptors = parse_all([ + 'using Car = uint16', + 'struct Fleet', + '# all cars in the fleet', + '\tcars = array(Car, __FILL__)' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Fleet'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'cars', 'type': 'Car', 'size': 0, 'disposition': 'fill', 'comments': 'all cars in the fleet'} + ]}) + + def test_cannot_parse_struct_vararray_numeric_size_types(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'using Car = uint16', + 'struct Fleet', + '# all cars in the fleet', + '\tcars = array(Car, size=123)' + ]) + + def test_cannot_parse_struct_vararray_fill_types(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'using Car = uint16', + 'struct Fleet', + '# all cars in the fleet', + '\tcars = array(Car, size=__FILL__)' + ]) + + def test_can_parse_struct_closed_by_other_type(self): + # Act: + type_descriptors = parse_all([ + 'struct Mosaic', + '\tmosaicId = uint64', + '', + '\tamount = uint32', + 'using Address = binary_fixed(25)' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Mosaic'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'mosaicId', **uint_descriptor(8), 'comments': ''}, + {'name': 'amount', **uint_descriptor(4), 'comments': ''} + ]}) + + def test_can_parse_struct_with_inline_member(self): + # Act: + type_descriptors = parse_all([ + 'struct Placeholder', + 'struct Pair', + '\tfooBar = uint64', + '# some placeholder comment', + '\tinline Placeholder', + '\tbaz = uint32' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Pair'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'fooBar', **uint_descriptor(8), 'comments': ''}, + {'type': 'Placeholder', 'disposition': 'inline', 'comments': 'some placeholder comment'}, + {'name': 'baz', 
**uint_descriptor(4), 'comments': ''} + ]}) + + def test_can_parse_struct_with_const_member(self): + # Act: + type_descriptors = parse_all([ + 'struct Pair', + '\tfooBar = uint64', + '# some const comment', + '\tconst int8 tupleSize = 2', + '\tbaz = uint32' + ]) + + # Assert: + self.assertEqual(1, len(type_descriptors)) + self.assertEqual(type_descriptors['Pair'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'fooBar', **uint_descriptor(8), 'comments': ''}, + {'name': 'tupleSize', **int_descriptor(1), 'disposition': 'const', 'value': 2, 'comments': 'some const comment'}, + {'name': 'baz', **uint_descriptor(4), 'comments': ''} + ]}) + + def test_cannot_parse_struct_with_unknown_member_type(self): + # Act + Assert: + for type_name in ['MosaicId', 'array(MosaicId, 10)']: + self._assert_parse_delayed_exception([ + 'struct Foo', + '\tid = {0}'.format(type_name) + ]) + + def test_cannot_parse_struct_with_non_enum_condition_link(self): + # Act + Assert: + self._assert_parse_commit_exception([ + 'using Shape = uint8', + 'using Circ = uint16', + 'struct Enclosing', + '\tdiscriminator = Shape', + '\tcircumference = Circ if discriminator equals 123' + ]) + + def test_cannot_parse_struct_with_unknown_condition_value(self): + # Act + Assert: + self._assert_parse_commit_exception([ + 'enum Shape : uint8', + '\tcircle = 1', + 'using Circ = uint16', + 'struct Enclosing', + '\tdiscriminator = Shape', + '\tcircumference = Circ if discriminator equals hexagon' + ]) + + def test_cannot_parse_struct_with_unknown_array_size(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'using MosaicId = uint16', + 'struct Foo', + '\tids = array(MosaicId, numMosaics)' + ]) + + def test_cannot_parse_struct_with_unknown_sort_key(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'using Face = uint16', + 'struct Tracking', + '\tfaces = array(Face, 10, sort_key=eyeColor)' + ]) + + def test_cannot_parse_struct_with_unknown_inline_type(self): + # Act + Assert: 
+ for type_name in ['MosaicId', 'array(MosaicId, 10)', 'uint8', 'binary_fixed(25)']: + self._assert_parse_delayed_exception([ + 'struct Foo', + '\tinline {0}'.format(type_name) + ]) + + def test_cannot_parse_struct_with_unknown_const_type(self): + # Act + Assert: + for type_name in ['uint7', 'binary_fixed(25)', 'Car']: + self._assert_parse_delayed_exception([ + 'struct Foo', + '\tconst {0} bar = 123'.format(type_name) + ]) + + # endregion + + # region enum + + def test_can_parse_enum_values(self): + # Act: + type_descriptors = parse_all([ + '# enumeration of entity types', + 'enum EntityType : uint16', + '\t# transfer transaction type', + '\ttransfer = 7', + '\thashLock = 0x0C' + ]) + + # Assert: + self.assertEqual(1, len(type_descriptors)) + self.assertEqual(type_descriptors['EntityType'], { + 'type': 'enum', **uint_descriptor(2, False), 'comments': 'enumeration of entity types', 'values': [ + {'name': 'transfer', 'value': 7, 'comments': 'transfer transaction type'}, + {'name': 'hashLock', 'value': 12, 'comments': ''} + ] + }) + + def test_can_parse_enum_closed_by_other_type(self): + # Act: + type_descriptors = parse_all([ + 'enum EntityType : uint16', + '\ttransfer = 7', + '', + '\thashLock = 0x0C', + 'using Address = binary_fixed(25)' + ]) + + # Assert: + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['EntityType'], {'type': 'enum', **uint_descriptor(2, False), 'comments': '', 'values': [ + {'name': 'transfer', 'value': 7, 'comments': ''}, + {'name': 'hashLock', 'value': 12, 'comments': ''} + ]}) + + def test_cannot_parse_enum_with_non_numeric_value(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'enum EntityType : uint16', + '\ttransfer = QZ' + ]) + + # endregion + + # region scoping + + def test_cannot_parse_as_outer_scope_from_inner_scope(self): + # Arrange: + valid_headers = [ + ['struct Mosaic', '\tmosaicId = uint64'], + ['enum EntityType : uint16', '\ttransfer = 7'] + ] + + # Act + Assert: + # - using is 
not valid in composite scope + for header in valid_headers: + self._assert_parse_delayed_exception(header + ['\tusing Address = binary_fixed(25)']) + + def test_cannot_parse_schema_with_duplicate_type_names(self): + # Arrange: + valid_declarations = [ + ['struct Bar', '\tfoo = uint64'], + ['enum Bar : uint16', '\tbaz = 7'], + ['using Bar = uint16'] + ] + + # Act + Assert: + counter = 0 + for declaration1 in valid_declarations: + for declaration2 in valid_declarations: + lines = declaration1 + declaration2 + if not declaration2[0].startswith('using'): + lines += ['using Baz = uint32'] # make sure all composites are closed + + self._assert_parse_delayed_exception(lines) + counter += 1 + + # Sanity: + self.assertEqual(9, counter) + + def test_cannot_parse_schema_with_duplicate_struct_property_names_in_same_scope(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'struct Bar', + '\tfoo = uint8', + '\tfoo = uint16' + ]) + + def test_cannot_parse_schema_with_duplicate_enum_property_names_in_same_scope(self): + # Act + Assert: + self._assert_parse_delayed_exception([ + 'enum Bar : uint16', + '\tfoo = 4', + '\tfoo = 9' + ]) + + def test_can_parse_schema_with_duplicate_struct_property_names_in_different_scopes(self): + # Act: + type_descriptors = parse_all([ + 'struct Bar', + '\tfoo = uint8', + '', + 'struct Baz', + '\tfoo = uint16' + ]) + + self.assertEqual(2, len(type_descriptors)) + self.assertEqual(type_descriptors['Bar'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'foo', **uint_descriptor(1), 'comments': ''} + ]}) + self.assertEqual(type_descriptors['Baz'], {'type': 'struct', 'comments': '', 'layout': [ + {'name': 'foo', **uint_descriptor(2), 'comments': ''} + ]}) + + def test_can_parse_schema_with_duplicate_enum_property_names_in_different_scopes(self): + # Act: + type_descriptors = parse_all([ + 'enum Bar : uint16', + '\tfoo = 4', + '', + 'enum Baz : int32', + '\tfoo = 9' + ]) + + self.assertEqual(2, len(type_descriptors)) + 
self.assertEqual(type_descriptors['Bar'], {'type': 'enum', **uint_descriptor(2, False), 'comments': '', 'values': [ + {'name': 'foo', 'value': 4, 'comments': ''} + ]}) + self.assertEqual(type_descriptors['Baz'], {'type': 'enum', **int_descriptor(4, False), 'comments': '', 'values': [ + {'name': 'foo', 'value': 9, 'comments': ''} + ]}) + + # endregion + + # region ordering + + def test_type_definition_order_is_preserved(self): + # Act: + type_descriptors = parse_all([ + 'using Truck = uint16', + 'struct Fleet', + '\tcarCount = uint8', + 'enum Bar : uint16', + '\tfoo = 4', + 'using Car = uint16' + ]) + + # Assert: + self.assertEqual(4, len(type_descriptors)) + self.assertEqual(list(type_descriptors.keys()), ['Truck', 'Fleet', 'Bar', 'Car']) + + # endregion diff --git a/catbuffer-parser/test/test_CommentParser.py b/catbuffer-parser/test/test_CommentParser.py new file mode 100644 index 00000000..fd136231 --- /dev/null +++ b/catbuffer-parser/test/test_CommentParser.py @@ -0,0 +1,76 @@ +import unittest + +from catparser.CommentParser import CommentParser + + +class CommentParserTest(unittest.TestCase): + def test_try_process_line_returns_true_only_for_comment_lines(self): + # Arrange: + parser = CommentParser() + + # Act + Assert: + for line in ['# foo bar', '#', '# $$$']: + self.assertTrue(parser.try_process_line(line)) + + for line in [' # foo bar', 'foo bar']: + self.assertFalse(parser.try_process_line(line)) + + def test_no_comments_are_present_initially(self): + # Arrange: + parser = CommentParser() + + # Act: + result = parser.commit() + + # Assert: + self.assertEqual({'comments': ''}, result) + + def test_can_add_single_line_comment(self): + # Arrange: + parser = CommentParser() + + # Act: + parser.try_process_line('# this is a comment') + result = parser.commit() + + # Assert: + self.assertEqual({'comments': 'this is a comment'}, result) + + def test_can_add_multi_line_comment(self): + # Arrange: + parser = CommentParser() + + # Act: + parser.try_process_line('# 
this is a comment') + parser.try_process_line('# foo bar') + result = parser.commit() + + # Assert: + self.assertEqual({'comments': 'this is a comment foo bar'}, result) + + def test_post_processing_removes_leading_and_trailing_whitespace_per_line(self): + # Arrange: + parser = CommentParser() + + # Act: + parser.try_process_line('# this is a comment ') + parser.try_process_line('# foo bar ') + result = parser.commit() + + # Assert: + self.assertEqual({'comments': 'this is a comment foo bar'}, result) + + def test_can_reuse_parser(self): + # Arrange: + parser = CommentParser() + + # Act: + parser.try_process_line('# this is a comment') + result1 = parser.commit() + + parser.try_process_line('# foo bar') + result2 = parser.commit() + + # Assert: + self.assertEqual({'comments': 'this is a comment'}, result1) + self.assertEqual({'comments': 'foo bar'}, result2) diff --git a/catbuffer-parser/test/test_EnumParser.py b/catbuffer-parser/test/test_EnumParser.py new file mode 100644 index 00000000..489c9b41 --- /dev/null +++ b/catbuffer-parser/test/test_EnumParser.py @@ -0,0 +1,142 @@ +import unittest +from test.constants import (INVALID_PROPERTY_NAMES, INVALID_USER_TYPE_NAMES, PRIMITIVE_TYPE_TUPLES, VALID_PROPERTY_NAMES, + VALID_USER_TYPE_NAMES) +from test.ParserTestUtils import MultiLineParserTestUtils, ParserFactoryTestUtils, SingleLineParserTestUtils + +from catparser.CatsParseException import CatsParseException +from catparser.EnumParser import EnumParserFactory, EnumValueParserFactory + + +def primitive_enum_descriptor(size, is_signed): + return {'type': 'enum', 'signedness': 'signed' if is_signed else 'unsigned', 'size': size} + + +class EnumParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(EnumParserFactory, self).assert_positives([ + 'enum F : uint8', 'enum Foo : uint7', 'enum FooZA09za : uint8', 'enum 8oo : uint9', 'enum $^^$ : uint8' + ]) + + def 
test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(EnumParserFactory, self).assert_negatives([ + ' enum F : uint8', 'enum F : uint8 ', 'enum F', 'enum F :', 'enum F : uintA', 'enum F : binary_fixed(8)', 'enum F : Foo' + ]) + + +class EnumParserTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Assert: + MultiLineParserTestUtils(EnumParserFactory, self).assert_parse(line, expected_result) + + def _assert_parse_exception(self, line): + # Assert: + MultiLineParserTestUtils(EnumParserFactory, self).assert_parse_exception(line) + + def test_parser_exposes_custom_factories(self): + # Act + parser = EnumParserFactory().create() + + # Assert + self.assertEqual(1, len(parser.factories())) + + def test_can_parse_type_declaration(self): + for primitive_tuple in PRIMITIVE_TYPE_TUPLES: + # Act + Assert: + self._assert_parse( + 'enum Colors : {0}'.format(primitive_tuple[0]), + ('Colors', {'type': 'enum', 'size': primitive_tuple[1], 'signedness': primitive_tuple[2], 'values': []})) + + def test_cannot_parse_enum_declaration_with_invalid_base(self): + for base_type in ['uint7', 'uint9']: + # Act + Assert: + self._assert_parse_exception('enum Colors : {0}'.format(base_type)) + + def test_enum_names_must_have_type_name_semantics(self): + # Assert: + MultiLineParserTestUtils(EnumParserFactory, self).assert_naming( + 'enum {0} : uint8', + VALID_USER_TYPE_NAMES, + INVALID_USER_TYPE_NAMES) + + def test_can_append_value(self): + # Arrange: + parser = EnumParserFactory().create() + + # Act: + parser.process_line('enum Colors : uint16') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Colors', {**primitive_enum_descriptor(2, False), 'values': [{'name': 'foo'}, {'name': 'bar'}]}), result) + + def test_cannot_append_multiple_properties_with_same_name(self): + # Arrange: + parser = EnumParserFactory().create() + + # Act: + parser.process_line('enum 
Colors : uint16') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar'}) + + # Assert: + with self.assertRaises(CatsParseException): + parser.append({'name': 'foo'}) + + def test_can_append_multiple_properties_with_same_value(self): + # Arrange: + parser = EnumParserFactory().create() + + # Act: + parser.process_line('enum Colors : int16') + parser.append({'name': 'foo', 'value': 2}) + parser.append({'name': 'bar', 'value': 2}) + result = parser.commit() + + # Assert: + self.assertEqual( + ('Colors', {**primitive_enum_descriptor(2, True), 'values': [{'name': 'foo', 'value': 2}, {'name': 'bar', 'value': 2}]}), + result) + + +class EnumValueParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(EnumValueParserFactory, self).assert_positives([ + 'foo = bar', 'foo = BAR', 'fzaZa09 = d', 'f = ccc', 'foo = fazFZA90', '$$$ = ^^^' + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(EnumValueParserFactory, self).assert_negatives([ + ' foo = bar', 'foo = bar ', 'foo = ', '= bar', 'foo = array(bar, baz)' + ]) + + +class EnumValueParserTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Assert: + SingleLineParserTestUtils(EnumValueParserFactory, self).assert_parse(line, expected_result) + + def test_can_parse_dec_declaration(self): + # Act + Assert: + self._assert_parse( + 'red = 12', + {'name': 'red', 'value': 12}) + + def test_can_parse_hex_declaration(self): + # Act + Assert: + self._assert_parse( + 'red = 0x11', + {'name': 'red', 'value': 17}) + + def test_cannot_parse_non_numeric_declaration(self): + # Act + Assert: + SingleLineParserTestUtils(EnumValueParserFactory, self).assert_parse_exception('red = uint16', ValueError) + + def test_member_names_must_have_property_name_semantics(self): + # Assert: + SingleLineParserTestUtils(EnumValueParserFactory, self).assert_naming('{0} = 12', VALID_PROPERTY_NAMES, 
INVALID_PROPERTY_NAMES) diff --git a/catbuffer-parser/test/test_ImportParser.py b/catbuffer-parser/test/test_ImportParser.py new file mode 100644 index 00000000..e67c017c --- /dev/null +++ b/catbuffer-parser/test/test_ImportParser.py @@ -0,0 +1,29 @@ +import unittest +from test.ParserTestUtils import ParserFactoryTestUtils, SingleLineParserTestUtils + +from catparser.ImportParser import ImportParserFactory, ImportResult + +VALID_IMPORT_FILE_NAMES = ['A', 'aBzZzac09', 'aa bb.cats', 'foo.cats', 'foo bar', 'foo bar.cats', '$^$'] + + +class ImportParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(ImportParserFactory, self).assert_positives([ + 'import "{0}"'.format(import_file) for import_file in VALID_IMPORT_FILE_NAMES + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(ImportParserFactory, self).assert_negatives([ + ' import "A"', 'import "A" ', 'import ""', 'import "aa\taa"', 'foo $$$' + ]) + + +class ImportParserTest(unittest.TestCase): + def test_can_parse_import(self): + for import_file in VALID_IMPORT_FILE_NAMES: + # Act + Assert: + SingleLineParserTestUtils(ImportParserFactory, self).assert_parse( + 'import "{0}"'.format(import_file), + ImportResult(import_file)) diff --git a/catbuffer-parser/test/test_ScopeManager.py b/catbuffer-parser/test/test_ScopeManager.py new file mode 100644 index 00000000..4a07bfa7 --- /dev/null +++ b/catbuffer-parser/test/test_ScopeManager.py @@ -0,0 +1,88 @@ +import unittest + +from catparser.CatsParseException import CatsParseException +from catparser.ScopeManager import ScopeManager + + +class ScopeManagerTest(unittest.TestCase): + def test_manager_initially_has_default_scope(self): + # Arrange: + manager = ScopeManager() + + # Act: + scope = manager.scope() + + # Assert: + self.assertEqual([':0'], scope) + + def test_can_increment_line_number(self): + # Arrange: + manager = ScopeManager() + + # Act: + 
manager.increment_line_number() + manager.increment_line_number() + manager.increment_line_number() + scope = manager.scope() + + # Assert: + self.assertEqual([':3'], scope) + + @staticmethod + def _initialize_manager_with_three_scopes(): + # Arrange: + manager = ScopeManager() + + # Act: + manager.push_scope('zeta') + manager.increment_line_number() + manager.push_scope('beta') + manager.increment_line_number() + manager.increment_line_number() + manager.push_scope('gamma') + return manager + + def test_can_push_scopes(self): + # Arrange: + manager = self._initialize_manager_with_three_scopes() + + # Act: + scope = manager.scope() + + # Assert: + self.assertEqual(['gamma:0', 'beta:2', 'zeta:1', ':0'], scope) + + def test_can_pop_some_scopes(self): + # Arrange: + manager = self._initialize_manager_with_three_scopes() + + # Act: + manager.pop_scope() + manager.pop_scope() + scope = manager.scope() + + # Assert: + self.assertEqual(['zeta:1', ':0'], scope) + + def test_can_pop_all_scopes(self): + # Arrange: + manager = self._initialize_manager_with_three_scopes() + + # Act: + manager.pop_scope() + manager.pop_scope() + manager.pop_scope() + scope = manager.scope() + + # Assert: + self.assertEqual([':0'], scope) + + def test_cannot_pop_default_scope(self): + # Arrange: + manager = ScopeManager() + manager.push_scope('zeta') + manager.pop_scope() + + # Act + Assert + with self.assertRaises(CatsParseException): + manager.pop_scope() diff --git a/catbuffer-parser/test/test_StructParser.py b/catbuffer-parser/test/test_StructParser.py new file mode 100644 index 00000000..6d0256d2 --- /dev/null +++ b/catbuffer-parser/test/test_StructParser.py @@ -0,0 +1,487 @@ +import unittest +from test.constants import (BUILTIN_TYPE_TUPLES, INVALID_PROPERTY_NAMES, INVALID_USER_TYPE_NAMES, VALID_PRIMITIVE_NAMES, + VALID_PROPERTY_NAMES, VALID_USER_TYPE_NAMES) +from test.ParserTestUtils import MultiLineParserTestUtils, ParserFactoryTestUtils, SingleLineParserTestUtils + +from 
catparser.CatsParseException import CatsParseException +from catparser.StructParser import (StructArrayMemberParserFactory, StructConstParserFactory, StructInlineParserFactory, + StructParserFactory, StructScalarMemberParserFactory) + +# region StructParserTest + + +class StructParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(StructParserFactory, self).assert_positives([ + 'struct F', 'struct Foo', 'struct FooZA09za', 'struct foo', 'struct 8oo', 'struct $^^$' + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(StructParserFactory, self).assert_negatives([ + ' struct Foo', 'struct Foo ', 'struct ', 'struct foo bar' + ]) + + +class StructParserTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Assert: + MultiLineParserTestUtils(StructParserFactory, self).assert_parse(line, expected_result) + + def _assert_parse_exception(self, line): + # Assert: + MultiLineParserTestUtils(StructParserFactory, self).assert_parse_exception(line) + + def test_parser_exposes_custom_factories(self): + # Act + parser = StructParserFactory().create() + + # Assert + self.assertEqual(4, len(parser.factories())) + + def test_can_parse_type_declaration(self): + # Act + Assert: + self._assert_parse( + 'struct Car', + ('Car', {'type': 'struct', 'layout': []})) + + def test_struct_names_must_have_type_name_semantics(self): + # Assert: + MultiLineParserTestUtils(StructParserFactory, self).assert_naming('struct {0}', VALID_USER_TYPE_NAMES, INVALID_USER_TYPE_NAMES) + + def test_can_append_scalar(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [{'name': 'foo'}, {'name': 'bar'}]}), result) + + def 
test_can_append_scalar_conditional(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar', 'condition': 'foo', 'condition_value': 'red'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [ + {'name': 'foo'}, + {'name': 'bar', 'condition': 'foo', 'condition_value': 'red'} + ]}), result) + + def test_can_append_scalar_conditional_trailing_discriminator(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'bar', 'condition': 'foo', 'condition_value': 'red'}) + parser.append({'name': 'foo'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [ + {'name': 'bar', 'condition': 'foo', 'condition_value': 'red'}, + {'name': 'foo'} + ]}), result) + + def test_cannot_append_scalar_with_invalid_condition_reference(self): + for condition in ['baz', '10']: + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar', 'condition': condition, 'condition_value': 'red'}) + + # Assert: + with self.assertRaises(CatsParseException): + parser.commit() + + def test_can_append_array_with_numeric_size(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar', 'size': 10}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [{'name': 'foo'}, {'name': 'bar', 'size': 10}]}), result) + + def test_can_append_array_with_valid_size_reference(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar', 'size': 'foo'}) + result = 
parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [{'name': 'foo'}, {'name': 'bar', 'size': 'foo'}]}), result) + + def test_can_append_array_with_valid_size_reference_and_inline(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'disposition': 'inline', 'type': 'Vehicle'}) + parser.append({'name': 'foo'}) + parser.append({'name': 'bar', 'size': 'foo'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [ + {'disposition': 'inline', 'type': 'Vehicle'}, + {'name': 'foo'}, + {'name': 'bar', 'size': 'foo'} + ]}), result) + + def test_cannot_append_array_with_invalid_size_reference(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + + # Assert: + with self.assertRaises(CatsParseException): + parser.append({'name': 'bar', 'size': 'fob'}) + + def test_can_append_multiple_properties_with_same_name_and_different_disposition(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'foo', 'disposition': 'const'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [{'name': 'foo'}, {'name': 'foo', 'disposition': 'const'}]}), result) + + def test_cannot_append_multiple_properties_with_same_name_and_disposition(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'bar'}) + + # Assert: + with self.assertRaises(CatsParseException): + parser.append({'name': 'foo'}) + + def test_can_append_unnamed_descriptor(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'disposition': 'inline', 'type': 'Foo'}) + 
parser.append({'name': 'foo'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [ + {'disposition': 'inline', 'type': 'Foo'}, + {'name': 'foo'} + ]}), result) + + def test_can_append_array_with_fill_disposition_last(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'cat', 'disposition': 'fill'}) + result = parser.commit() + + # Assert: + self.assertEqual(('Car', {'type': 'struct', 'layout': [ + {'name': 'foo'}, + {'name': 'cat', 'disposition': 'fill'} + ]}), result) + + def test_cannot_append_property_after_array_with_fill_disposition(self): + # Arrange: + parser = StructParserFactory().create() + + # Act: + parser.process_line('struct Car') + parser.append({'name': 'foo'}) + parser.append({'name': 'cat', 'disposition': 'fill'}) + + # Assert: + with self.assertRaises(CatsParseException): + parser.append({'name': 'bar'}) + +# endregion + +# region StructConstParserTest + + +class StructConstParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(StructConstParserFactory, self).assert_positives([ + 'const Foo foo = Bar', 'const FOO FOO = BAR', 'const Za09Za FzaZa09 = fzaZa09', 'const !!! 
$$$ = ###' + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(StructConstParserFactory, self).assert_negatives([ + ' const Foo foo = Bar', 'const Foo foo = Bar ', 'const Foo foo =', 'const Foo foo Bar', 'const Foo = Bar', 'const Foo' + ]) + + +class StructConstParserTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Assert: + SingleLineParserTestUtils(StructConstParserFactory, self).assert_parse(line, expected_result) + + def _assert_parse_exception(self, line): + # Assert: + SingleLineParserTestUtils(StructConstParserFactory, self).assert_parse_exception(line) + + def test_can_parse_uint_type_constant(self): + # Act + Assert: + for value in [32, 0x20]: + self._assert_parse( + 'const uint16 foo = {0}'.format(value), + {'name': 'foo', 'type': 'byte', 'signedness': 'unsigned', 'size': 2, 'value': 32, 'disposition': 'const'}) + + def test_can_parse_custom_type_constant(self): + # Act + Assert: + for value in [33, 0x21]: + self._assert_parse( + 'const ColorShade red = {0}'.format(value), + {'name': 'red', 'type': 'ColorShade', 'value': 33, 'disposition': 'const'}) + + def test_cannot_parse_non_numeric_value(self): + # Act + Assert: + for value in ['FOO', 'AF']: + SingleLineParserTestUtils(StructConstParserFactory, self).assert_parse_exception( + 'const uint16 foo = {0}'.format(value), + ValueError) + + def test_cannot_parse_binary_fixed_type_constant(self): + # Act + Assert: + SingleLineParserTestUtils(StructConstParserFactory, self).assert_parse_exception('const binary_fixed(25) foo = 123') + + def test_member_names_must_have_property_name_semantics(self): + # Assert: + SingleLineParserTestUtils(StructConstParserFactory, self).assert_naming( + 'const uint32 {0} = 123', + VALID_PROPERTY_NAMES, + INVALID_PROPERTY_NAMES) + + def test_type_names_must_have_type_name_or_uint_semantics(self): + # Assert: + SingleLineParserTestUtils(StructConstParserFactory, self).assert_naming( + 'const {0} foo = 
123', + VALID_USER_TYPE_NAMES + VALID_PRIMITIVE_NAMES, + INVALID_USER_TYPE_NAMES + ['binary_fixed(32)']) + +# endregion + +# region StructInlineParserTest + + +class StructInlineParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(StructInlineParserFactory, self).assert_positives([ + 'inline Bar', 'inline BAR', 'inline fzaZa09', 'inline $$$' + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(StructInlineParserFactory, self).assert_negatives([ + ' inline Bar', 'inline Bar ', 'inline ', ' Bar' + ]) + + +class StructInlineParserTest(unittest.TestCase): + def test_can_parse_simple_custom_declaration(self): + # Act + Assert: + SingleLineParserTestUtils(StructInlineParserFactory, self).assert_parse( + 'inline Vehicle_', + {'type': 'Vehicle_', 'disposition': 'inline'}) + +# endregion + +# region StructScalarMemberParser + + +class StructScalarParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + ParserFactoryTestUtils(StructScalarMemberParserFactory, self).assert_positives([ + 'foo = bar', 'foo = BAR', 'fzaZa09 = d', '& = $$$', 'foo = fazFZA90', + 'foo = bar if abc equals def', 'foo = bar if abc has def' + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + ParserFactoryTestUtils(StructScalarMemberParserFactory, self).assert_negatives([ + ' foo = bar', 'foo = bar ', 'foo = ', '= bar', 'foo = array(bar, baz)', 'foo = bar if abc mask def', + 'foo = bar if abc equals', 'foo = bar abc equals def', + 'foo = bar if abc has', 'foo = bar abc has def' + ]) + + +class StructScalarParserTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Assert: + SingleLineParserTestUtils(StructScalarMemberParserFactory, self).assert_parse(line, expected_result) + + def test_can_parse_simple_custom_declaration(self): + # Act + Assert: + self._assert_parse( + 'car = Vehicle_', + 
{'name': 'car', 'type': 'Vehicle_'}) + + def test_can_parse_simple_builtin_declaration(self): + for builtin_tuple in BUILTIN_TYPE_TUPLES: + # Act + Assert: + self._assert_parse( + 'car = {0}'.format(builtin_tuple[0]), + {'name': 'car', 'type': 'byte', 'signedness': builtin_tuple[2], 'size': builtin_tuple[1]}) + + def test_can_parse_conditional_custom_declaration_equals(self): + # Act + Assert: + self._assert_parse( + 'roadGrade = RoadGrade_ if terrain equals road', + {'name': 'roadGrade', 'type': 'RoadGrade_', 'condition': 'terrain', 'condition_operation': 'equals', 'condition_value': 'road'}) + + def test_can_parse_conditional_custom_declaration_has(self): + # Act + Assert: + self._assert_parse( + 'roadGrade = RoadGrade_ if terrain has road', + {'name': 'roadGrade', 'type': 'RoadGrade_', 'condition': 'terrain', 'condition_operation': 'has', 'condition_value': 'road'}) + + def test_member_names_must_have_property_name_semantics(self): + # Assert: + SingleLineParserTestUtils(StructScalarMemberParserFactory, self).assert_naming( + '{0} = uint32', + VALID_PROPERTY_NAMES, + INVALID_PROPERTY_NAMES) + +# endregion + +# region StructArrayMemberParser + + +VALID_ARRAY_PATTERNS = ['foo = {0}(bar, {1}baz)', '$$$ = {0}(&, {1}**)', '$$$ = {0}(&, {1}**, sort_key=@@)'] +INVALID_ARRAY_PATTERNS = [ + ' foo = {0}(bar, {1}baz)', 'foo = {0}(bar, {1}baz) ', 'foo = ', '= {0}(bar, {1}baz)', + 'foo = {0}(bar, {1}baz', 'foo = {0}(bar, {1}baz) if abc equals def', 'foo = {0}(bar, {1}baz) if abc has def' +] +ARRAY_DIMENSION_QUALIFIERS = ['', 'size='] + + +class StructArrayMemberParserFactoryTest(unittest.TestCase): + def test_is_match_returns_true_for_positives(self): + # Assert: + for dimension_qualifier in ARRAY_DIMENSION_QUALIFIERS: + ParserFactoryTestUtils(StructArrayMemberParserFactory, self).assert_positives([ + pattern.format('array', dimension_qualifier) for pattern in VALID_ARRAY_PATTERNS + ]) + + def test_is_match_returns_false_for_negatives(self): + # Assert: + for 
dimension_qualifier in ARRAY_DIMENSION_QUALIFIERS: + ParserFactoryTestUtils(StructArrayMemberParserFactory, self).assert_negatives([ + pattern.format('array', dimension_qualifier) for pattern in INVALID_ARRAY_PATTERNS + ]) + + +class StructArrayMemberParserTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Assert: + SingleLineParserTestUtils(StructArrayMemberParserFactory, self).assert_parse(line, expected_result) + + def test_can_parse_array_with_non_numeric_size(self): + for type_name in ['byte', 'Car']: + # Act + Assert: + self._assert_parse( + 'vehicles = array({0}, garageSize)'.format(type_name), + {'name': 'vehicles', 'type': type_name, 'size': 'garageSize'}) + + def test_can_parse_array_with_numeric_size(self): + for type_name in ['byte', 'Car']: + for numeric_str in ['10', '0x0A']: + # Act + Assert: + self._assert_parse( + 'vehicles = array({0}, {1})'.format(type_name, numeric_str), + {'name': 'vehicles', 'type': type_name, 'size': 10}) + + def test_can_parse_array_with_fill_size(self): + for type_name in ['byte', 'Car']: + # Act + Assert: + self._assert_parse( + 'vehicles = array({0}, __FILL__)'.format(type_name), + {'name': 'vehicles', 'type': type_name, 'size': 0, 'disposition': 'fill'}) + + def test_can_parse_array_with_sort_key(self): + # Act + Assert: + self._assert_parse( + 'vehicles = array(Car, 10, sort_key=bar)', + {'name': 'vehicles', 'type': 'Car', 'size': 10, 'sort_key': 'bar'}) + + def test_can_parse_vararray_with_non_numeric_size(self): + for type_name in ['byte', 'Car']: + # Act + Assert: + self._assert_parse( + 'vehicles = array({0}, size=garageSize)'.format(type_name), + {'name': 'vehicles', 'type': type_name, 'size': 'garageSize', 'disposition': 'var'}) + + def test_can_parse_vararray_with_unsupported_numeric_size(self): + for type_name in ['byte', 'Car']: + # Act + Assert: size is not converted for var array + self._assert_parse( + 'vehicles = array({0}, size=0x0A)'.format(type_name), + {'name': 'vehicles', 
'type': type_name, 'size': '0x0A', 'disposition': 'var'}) + + def test_can_parse_vararray_with_unsupported_fill_size(self): + for type_name in ['byte', 'Car']: + # Act + Assert: size is not converted for var array + self._assert_parse( + 'vehicles = array({0}, size=__FILL__)'.format(type_name), + {'name': 'vehicles', 'type': type_name, 'size': '__FILL__', 'disposition': 'var'}) + + def test_can_parse_vararray_with_sort_key(self): + # Act + Assert: + self._assert_parse( + 'vehicles = array(Car, size=garageSize, sort_key=bar)', + {'name': 'vehicles', 'type': 'Car', 'size': 'garageSize', 'disposition': 'var', 'sort_key': 'bar'}) + + def test_member_names_must_have_property_name_semantics(self): + # Assert: + SingleLineParserTestUtils(StructArrayMemberParserFactory, self).assert_naming( + '{0} = array(Car, 10)', + VALID_PROPERTY_NAMES, + INVALID_PROPERTY_NAMES) + +# endregion diff --git a/catbuffer-parser/test/test_parserutils.py b/catbuffer-parser/test/test_parserutils.py new file mode 100644 index 00000000..f021dd17 --- /dev/null +++ b/catbuffer-parser/test/test_parserutils.py @@ -0,0 +1,194 @@ +import unittest +from test.constants import (BUILTIN_TYPE_TUPLES, INT_TYPE_TUPLES, INVALID_PRIMITIVE_NAMES, INVALID_PROPERTY_NAMES, INVALID_USER_TYPE_NAMES, + UINT_TYPE_TUPLES, VALID_PRIMITIVE_NAMES, VALID_PROPERTY_NAMES, VALID_USER_TYPE_NAMES) + +from catparser.CatsParseException import CatsParseException +from catparser.parserutils import (is_builtin, is_dec_or_hex, is_primitive, parse_builtin, parse_dec_or_hex, require_primitive, + require_property_name, require_user_type_name) + +# region naming conventions + + +class RequireUserTypeNameTest(unittest.TestCase): + def test_nothrow_for_positives(self): + for string in VALID_USER_TYPE_NAMES: + # Act: + result = require_user_type_name(string) + + # Assert: + self.assertEqual(string, result) + + def test_throw_for_negatives(self): + for string in INVALID_USER_TYPE_NAMES: + # Act: + with self.assertRaises(CatsParseException): + 
require_user_type_name(string) + + +class RequirePropertyNameTest(unittest.TestCase): + def test_nothrow_for_positives(self): + for string in VALID_PROPERTY_NAMES: + # Act: + result = require_property_name(string) + + # Assert: + self.assertEqual(string, result) + + def test_throw_for_negatives(self): + for string in INVALID_PROPERTY_NAMES: + # Act: + with self.assertRaises(CatsParseException): + require_property_name(string) + +# endregion + +# region primitive + + +class IsPrimitiveTest(unittest.TestCase): + def test_true_for_positives(self): + for string in VALID_PRIMITIVE_NAMES: + # Act: + result = is_primitive(string) + + # Assert: + self.assertTrue(result) + + def test_false_for_negatives(self): + for string in INVALID_PRIMITIVE_NAMES: + # Act: + result = is_primitive(string) + + # Assert: + self.assertFalse(result) + + +class RequirePrimitiveTest(unittest.TestCase): + def test_nothrow_for_positives(self): + for string in VALID_PRIMITIVE_NAMES: + # Act: + result = require_primitive(string) + + # Assert: + self.assertEqual(string, result) + + def test_throw_for_negatives(self): + for string in INVALID_PRIMITIVE_NAMES: + # Act: + with self.assertRaises(CatsParseException): + require_primitive(string) + +# endregion + +# region dec or hex + + +INVALID_NUMERIC_STRINGS = ['AFE', '0x8Y8', 'p', '&'] + + +class IsDecOrHexTest(unittest.TestCase): + def test_true_for_positives(self): + for string in ['10', '123', '0x10', '0x123', '0xAFE']: + # Act: + result = is_dec_or_hex(string) + + # Assert: + self.assertTrue(result) + + def test_false_for_negatives(self): + for string in INVALID_NUMERIC_STRINGS: + # Act: + result = is_dec_or_hex(string) + + # Assert: + self.assertFalse(result) + + +class ParseDecOrHexTest(unittest.TestCase): + def test_can_parse_dec(self): + # Act + Assert: + self.assertEqual(10, parse_dec_or_hex('10')) + self.assertEqual(123, parse_dec_or_hex('123')) + + def test_can_parse_hex(self): + # Act + Assert: + self.assertEqual(0x10, 
parse_dec_or_hex('0x10')) + self.assertEqual(0x123, parse_dec_or_hex('0x123')) + self.assertEqual(0xAFE, parse_dec_or_hex('0xAFE')) + + def test_cannot_parse_invalid_number(self): + for string in INVALID_NUMERIC_STRINGS: + with self.assertRaises(ValueError): + parse_dec_or_hex(string) + +# endregion + +# region builtin + + +INVALID_BUILTIN_TYPE_NAMES = [ + 'binary_fixed(2x22)', # malformed number + 'binary_fixed(0x8Y8)', + 'binary_fixed(x)', + 'uint33', # unsupported size + ' uint32', # invalid spacing + 'uint32 ', + 'FooBar' # non-builtin +] + + +class IsBuiltinTest(unittest.TestCase): + def test_true_for_positives(self): + for builtin_tuple in BUILTIN_TYPE_TUPLES: + # Act: + result = is_builtin(builtin_tuple[0]) + + # Assert: + self.assertTrue(result) + + def test_false_for_negatives(self): + for string in INVALID_BUILTIN_TYPE_NAMES: + # Act: + result = is_builtin(string) + + # Assert: + self.assertFalse(result) + + +class ParseBuiltinTest(unittest.TestCase): + def _assert_parse(self, line, expected_result): + # Act: + result = parse_builtin(line) + + # Assert: + self.assertEqual(expected_result, result) + + def test_can_parse_int_builtin(self): + for int_tuple in INT_TYPE_TUPLES: + # Act + Assert: + self._assert_parse( + int_tuple[0], + {'type': 'byte', 'signedness': 'signed', 'size': int_tuple[1]}) + + def test_can_parse_uint_builtin(self): + for uint_tuple in UINT_TYPE_TUPLES: + # Act + Assert: + self._assert_parse( + uint_tuple[0], + {'type': 'byte', 'signedness': 'unsigned', 'size': uint_tuple[1]}) + + def test_can_parse_binary_fixed_builtin(self): + for size_tuple in [('32', 32), ('0x20', 32), ('25', 25)]: + # Act + Assert: + self._assert_parse( + 'binary_fixed({0})'.format(size_tuple[0]), + {'type': 'byte', 'signedness': 'unsigned', 'size': size_tuple[1]}) + + def test_cannot_parse_invalid_builtin(self): + # Arrange: + for type_name in INVALID_BUILTIN_TYPE_NAMES: + # Act + Assert: + with self.assertRaises(CatsParseException): + parse_builtin(type_name) + 
+# endregion diff --git a/catbuffer-schemas/README.md b/catbuffer-schemas/README.md new file mode 100644 index 00000000..f2a160f5 --- /dev/null +++ b/catbuffer-schemas/README.md @@ -0,0 +1,3 @@ +# catbuffer-schemas + +This repository contains Symbol entity schemas in catbuffer format. diff --git a/catbuffer-schemas/schemas/account_link/account_key_link.cats b/catbuffer-schemas/schemas/account_link/account_key_link.cats new file mode 100644 index 00000000..d58cdb3d --- /dev/null +++ b/catbuffer-schemas/schemas/account_link/account_key_link.cats @@ -0,0 +1,25 @@ +import "transaction.cats" + +# binary layout for an account key link transaction +struct AccountKeyLinkTransactionBody + # linked public key + linkedPublicKey = Key + + # link action + linkAction = LinkAction + +# binary layout for a non-embedded account key link transaction +struct AccountKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x414C + + inline Transaction + inline AccountKeyLinkTransactionBody + +# binary layout for an embedded account key link transaction +struct EmbeddedAccountKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x414C + + inline EmbeddedTransaction + inline AccountKeyLinkTransactionBody diff --git a/catbuffer-schemas/schemas/account_link/node_key_link.cats b/catbuffer-schemas/schemas/account_link/node_key_link.cats new file mode 100644 index 00000000..a922c771 --- /dev/null +++ b/catbuffer-schemas/schemas/account_link/node_key_link.cats @@ -0,0 +1,25 @@ +import "transaction.cats" + +# binary layout for a node key link transaction +struct NodeKeyLinkTransactionBody + # linked public key + linkedPublicKey = Key + + # link action + linkAction = LinkAction + +# binary layout for a non-embedded node key link transaction +struct NodeKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x424C + + inline Transaction + inline NodeKeyLinkTransactionBody + +# binary layout for an embedded node key link 
transaction +struct EmbeddedNodeKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x424C + + inline EmbeddedTransaction + inline NodeKeyLinkTransactionBody diff --git a/catbuffer-schemas/schemas/aggregate/aggregate.cats b/catbuffer-schemas/schemas/aggregate/aggregate.cats new file mode 100644 index 00000000..8c44c6af --- /dev/null +++ b/catbuffer-schemas/schemas/aggregate/aggregate.cats @@ -0,0 +1,52 @@ +import "aggregate/cosignature.cats" +import "transaction.cats" + +# binary layout for an aggregate transaction +struct AggregateTransactionBody + # aggregate hash of an aggregate's transactions + transactionsHash = Hash256 + + # transaction payload size in bytes + # \note this is the total number of bytes occupied by all sub-transactions + payloadSize = uint32 + + # reserved padding to align end of AggregateTransactionHeader on 8-byte boundary + aggregateTransactionHeader_Reserved1 = uint32 + + # sub-transaction data (transactions are variable sized and payload size is in bytes) + transactions = array(EmbeddedTransaction, size=payloadSize) + + # cosignatures data (fills remaining body space after transactions) + cosignatures = array(Cosignature, __FILL__) + +# binary layout for an aggregate complete transaction +struct AggregateCompleteTransaction + const uint8 version = 2 + const EntityType entityType = 0x4141 + + inline Transaction + inline AggregateTransactionBody + +# binary layout for an aggregate bonded transaction +struct AggregateBondedTransaction + const uint8 version = 2 + const EntityType entityType = 0x4241 + + inline Transaction + inline AggregateTransactionBody + +# binary layout for an aggregate complete transaction +struct AggregateCompleteTransactionV1 + const uint8 version = 1 + const EntityType entityType = 0x4141 + + inline Transaction + inline AggregateTransactionBody + +# binary layout for an aggregate bonded transaction +struct AggregateBondedTransactionV1 + const uint8 version = 1 + const EntityType entityType = 
0x4241 + + inline Transaction + inline AggregateTransactionBody diff --git a/catbuffer-schemas/schemas/aggregate/cosignature.cats b/catbuffer-schemas/schemas/aggregate/cosignature.cats new file mode 100644 index 00000000..311b830b --- /dev/null +++ b/catbuffer-schemas/schemas/aggregate/cosignature.cats @@ -0,0 +1,19 @@ +import "types.cats" + +# cosignature attached to an aggregate transaction +struct Cosignature + # version + version = uint64 + + # cosigner public key + signerPublicKey = Key + + # cosigner signature + signature = Signature + +# cosignature detached from an aggregate transaction +struct DetachedCosignature + inline Cosignature + + # hash of the aggregate transaction that is signed by this cosignature + parentHash = Hash256 diff --git a/catbuffer-schemas/schemas/all.cats b/catbuffer-schemas/schemas/all.cats new file mode 100644 index 00000000..a1046f48 --- /dev/null +++ b/catbuffer-schemas/schemas/all.cats @@ -0,0 +1,39 @@ +import "block.cats" +import "receipts.cats" +import "namespace/namespace_receipts.cats" +import "resolution_statement/resolution_statements.cats" +import "state/account_state.cats" +import "state/hash_lock.cats" +import "state/lock_info.cats" +import "state/metadata_entry.cats" +import "state/mosaic_entry.cats" +import "state/multisig_entry.cats" +import "state/namespace_history.cats" +import "state/restriction_account.cats" +import "state/restriction_mosaic_entry.cats" +import "state/secret_lock.cats" +import "account_link/account_key_link.cats" +import "account_link/node_key_link.cats" +import "aggregate/aggregate.cats" +import "coresystem/voting_key_link.cats" +import "coresystem/vrf_key_link.cats" +import "lock_hash/hash_lock.cats" +import "lock_secret/secret_lock.cats" +import "lock_secret/secret_proof.cats" +import "metadata/account_metadata.cats" +import "metadata/mosaic_metadata.cats" +import "metadata/namespace_metadata.cats" +import "mosaic/mosaic_definition.cats" +import "mosaic/mosaic_supply_change.cats" +import 
"multisig/multisig_account_modification.cats" +import "namespace/address_alias.cats" +import "namespace/mosaic_alias.cats" +import "namespace/namespace_registration.cats" +import "restriction_account/account_address_restriction.cats" +import "restriction_account/account_mosaic_restriction.cats" +import "restriction_account/account_operation_restriction.cats" +import "restriction_mosaic/mosaic_address_restriction.cats" +import "restriction_mosaic/mosaic_global_restriction.cats" +import "transfer/transfer.cats" +import "finalization/finalization_round.cats" +import "finalization/finalized_block_header.cats" diff --git a/catbuffer-schemas/schemas/block.cats b/catbuffer-schemas/schemas/block.cats new file mode 100644 index 00000000..2ba01757 --- /dev/null +++ b/catbuffer-schemas/schemas/block.cats @@ -0,0 +1,92 @@ +import "entity.cats" + +using ProofGamma = binary_fixed(32) +using ProofVerificationHash = binary_fixed(16) +using ProofScalar = binary_fixed(32) + +# Verfiable random function proof +struct VrfProof + # gamma + gamma = ProofGamma + + # verification hash + verificationHash = ProofVerificationHash + + # scalar + scalar = ProofScalar + +# binary layout for a block header +struct BlockHeader + inline SizePrefixedEntity + inline VerifiableEntity + inline EntityBody + + # block height + height = Height + + # number of milliseconds elapsed since creation of nemesis block + timestamp = Timestamp + + # block difficulty + difficulty = Difficulty + + # generation hash proof + generationHashProof = VrfProof + + # previous block hash + previousBlockHash = Hash256 + + # hash of the transactions in this block + transactionsHash = Hash256 + + # hash of the receipts generated by this block + receiptsHash = Hash256 + + # hash of the global chain state at this block + stateHash = Hash256 + + # beneficiary address designated by harvester + beneficiaryAddress = Address + + # fee multiplier applied to block transactions + feeMultiplier = BlockFeeMultiplier + +# binary layout for 
an importance block footer +struct ImportanceBlockFooter + # number of voting eligible accounts + votingEligibleAccountsCount = uint32 + + # number of harvesting eligible accounts + harvestingEligibleAccountsCount = uint64 + + # total balance eligible for voting + totalVotingBalance = Amount + + # previous importance block hash + previousImportanceBlockHash = Hash256 + +# binary layout for a nemesis block header +struct NemesisBlockHeader + const uint8 version = 1 + const EntityType entityType = 0x8043 + + inline BlockHeader + inline ImportanceBlockFooter + +# binary layout for a normal block header +struct NormalBlockHeader + const uint8 version = 1 + const EntityType entityType = 0x8143 + + inline BlockHeader + + # reserved padding to align end of BlockHeader on 8-byte boundary + blockHeader_Reserved1 = uint32 + +# binary layout for an importance block header +struct ImportanceBlockHeader + const uint8 version = 1 + const EntityType entityType = 0x8243 + + inline BlockHeader + inline ImportanceBlockFooter diff --git a/catbuffer-schemas/schemas/coresystem/voting_key_link.cats b/catbuffer-schemas/schemas/coresystem/voting_key_link.cats new file mode 100644 index 00000000..7e084c82 --- /dev/null +++ b/catbuffer-schemas/schemas/coresystem/voting_key_link.cats @@ -0,0 +1,31 @@ +import "transaction.cats" + +# binary layout for a voting key link transaction +struct VotingKeyLinkTransactionBody + # linked public key + linkedPublicKey = VotingKey + + # start finalization epoch + startEpoch = FinalizationEpoch + + # end finalization epoch + endEpoch = FinalizationEpoch + + # link action + linkAction = LinkAction + +# binary layout for a non-embedded voting key link transaction +struct VotingKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x4143 + + inline Transaction + inline VotingKeyLinkTransactionBody + +# binary layout for an embedded voting key link transaction +struct EmbeddedVotingKeyLinkTransaction + const uint8 version = 1 + const 
EntityType entityType = 0x4143 + + inline EmbeddedTransaction + inline VotingKeyLinkTransactionBody diff --git a/catbuffer-schemas/schemas/coresystem/vrf_key_link.cats b/catbuffer-schemas/schemas/coresystem/vrf_key_link.cats new file mode 100644 index 00000000..a162f0fc --- /dev/null +++ b/catbuffer-schemas/schemas/coresystem/vrf_key_link.cats @@ -0,0 +1,25 @@ +import "transaction.cats" + +# binary layout for a vrf key link transaction +struct VrfKeyLinkTransactionBody + # linked public key + linkedPublicKey = Key + + # link action + linkAction = LinkAction + +# binary layout for a non-embedded vrf key link transaction +struct VrfKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x4243 + + inline Transaction + inline VrfKeyLinkTransactionBody + +# binary layout for an embedded vrf key link transaction +struct EmbeddedVrfKeyLinkTransaction + const uint8 version = 1 + const EntityType entityType = 0x4243 + + inline EmbeddedTransaction + inline VrfKeyLinkTransactionBody diff --git a/catbuffer-schemas/schemas/entity.cats b/catbuffer-schemas/schemas/entity.cats new file mode 100644 index 00000000..5f337d3f --- /dev/null +++ b/catbuffer-schemas/schemas/entity.cats @@ -0,0 +1,56 @@ +import "types.cats" + +# enumeration of entity types +enum EntityType : uint16 + # reserved entity type + reserved = 0x0000 + +# enumeration of network types +enum NetworkType : uint8 + # mijin network + mijin = 0x60 + + # public network + public = 0x68 + + # private network + private = 0x78 + + # mijin test network + mijinTest = 0x90 + + # public test network + publicTest = 0x98 + + # private test network + privateTest = 0xA8 + +# binary layout for a size-prefixed entity +struct SizePrefixedEntity + # entity size + size = uint32 + +# binary layout for a verifiable entity +struct VerifiableEntity + # reserved padding to align Signature on 8-byte boundary + verifiableEntityHeader_Reserved1 = uint32 + + # entity signature + signature = Signature + +# binary layout for 
a blockchain entity (block or transaction) +struct EntityBody + # entity signer's public key + signerPublicKey = Key + + # reserved padding to align end of EntityBody on 8-byte boundary + entityBody_Reserved1 = uint32 + + # entity version + version = uint8 + + # entity network + network = NetworkType + + # entity type + type = EntityType diff --git a/catbuffer-schemas/schemas/finalization/finalization_round.cats b/catbuffer-schemas/schemas/finalization/finalization_round.cats new file mode 100644 index 00000000..3b6d633e --- /dev/null +++ b/catbuffer-schemas/schemas/finalization/finalization_round.cats @@ -0,0 +1,9 @@ +import "types.cats" + +# binary layout for finalization round +struct FinalizationRound + # finalization epoch + epoch = FinalizationEpoch + + # finalization point + point = FinalizationPoint diff --git a/catbuffer-schemas/schemas/finalization/finalized_block_header.cats b/catbuffer-schemas/schemas/finalization/finalized_block_header.cats new file mode 100644 index 00000000..94f28dd6 --- /dev/null +++ b/catbuffer-schemas/schemas/finalization/finalized_block_header.cats @@ -0,0 +1,13 @@ +import "finalization/finalization_round.cats" + +# binary layout for finalized block header +struct FinalizedBlockHeader + # finalization round + round = FinalizationRound + + # finalization height + height = Height + + # finalization hash + hash = Hash256 + diff --git a/catbuffer-schemas/schemas/lock_hash/hash_lock.cats b/catbuffer-schemas/schemas/lock_hash/hash_lock.cats new file mode 100644 index 00000000..048a3f74 --- /dev/null +++ b/catbuffer-schemas/schemas/lock_hash/hash_lock.cats @@ -0,0 +1,28 @@ +import "transaction.cats" + +# binary layout for a hash lock transaction +struct HashLockTransactionBody + # lock mosaic + mosaic = UnresolvedMosaic + + # number of blocks for which a lock should be valid + duration = BlockDuration + + # lock hash + hash = Hash256 + +# binary layout for a non-embedded hash lock transaction +struct HashLockTransaction + const uint8 
version = 1 + const EntityType entityType = 0x4148 + + inline Transaction + inline HashLockTransactionBody + +# binary layout for an embedded hash lock transaction +struct EmbeddedHashLockTransaction + const uint8 version = 1 + const EntityType entityType = 0x4148 + + inline EmbeddedTransaction + inline HashLockTransactionBody diff --git a/catbuffer-schemas/schemas/lock_secret/lock_secret_types.cats b/catbuffer-schemas/schemas/lock_secret/lock_secret_types.cats new file mode 100644 index 00000000..d65e27e5 --- /dev/null +++ b/catbuffer-schemas/schemas/lock_secret/lock_secret_types.cats @@ -0,0 +1,10 @@ +# enumeration of lock hash algorithms +enum LockHashAlgorithm : uint8 + # input is hashed using sha-3 256 + sha3_256 = 0x00 + + # input is hashed twice: first with sha-256 and then with ripemd-160 (bitcoin's OP_HASH160) + hash_160 = 0x01 + + # input is hashed twice with sha-256 (bitcoin's OP_HASH256) + hash_256 = 0x02 diff --git a/catbuffer-schemas/schemas/lock_secret/secret_lock.cats b/catbuffer-schemas/schemas/lock_secret/secret_lock.cats new file mode 100644 index 00000000..896e895c --- /dev/null +++ b/catbuffer-schemas/schemas/lock_secret/secret_lock.cats @@ -0,0 +1,35 @@ +import "lock_secret/lock_secret_types.cats" +import "transaction.cats" + +# binary layout for a secret lock transaction +struct SecretLockTransactionBody + # locked mosaic recipient address + recipientAddress = UnresolvedAddress + + # secret + secret = Hash256 + + # locked mosaic + mosaic = UnresolvedMosaic + + # number of blocks for which a lock should be valid + duration = BlockDuration + + # hash algorithm + hashAlgorithm = LockHashAlgorithm + +# binary layout for a non-embedded secret lock transaction +struct SecretLockTransaction + const uint8 version = 1 + const EntityType entityType = 0x4152 + + inline Transaction + inline SecretLockTransactionBody + +# binary layout for an embedded secret lock transaction +struct EmbeddedSecretLockTransaction + const uint8 version = 1 + const 
EntityType entityType = 0x4152 + + inline EmbeddedTransaction + inline SecretLockTransactionBody diff --git a/catbuffer-schemas/schemas/lock_secret/secret_proof.cats b/catbuffer-schemas/schemas/lock_secret/secret_proof.cats new file mode 100644 index 00000000..48dd6dd1 --- /dev/null +++ b/catbuffer-schemas/schemas/lock_secret/secret_proof.cats @@ -0,0 +1,35 @@ +import "lock_secret/lock_secret_types.cats" +import "transaction.cats" + +# binary layout for a secret proof transaction +struct SecretProofTransactionBody + # locked mosaic recipient address + recipientAddress = UnresolvedAddress + + # secret + secret = Hash256 + + # proof size in bytes + proofSize = uint16 + + # hash algorithm + hashAlgorithm = LockHashAlgorithm + + # proof data + proof = array(byte, proofSize) + +# binary layout for a non-embedded secret proof transaction +struct SecretProofTransaction + const uint8 version = 1 + const EntityType entityType = 0x4252 + + inline Transaction + inline SecretProofTransactionBody + +# binary layout for an embedded secret proof transaction +struct EmbeddedSecretProofTransaction + const uint8 version = 1 + const EntityType entityType = 0x4252 + + inline EmbeddedTransaction + inline SecretProofTransactionBody diff --git a/catbuffer-schemas/schemas/metadata/account_metadata.cats b/catbuffer-schemas/schemas/metadata/account_metadata.cats new file mode 100644 index 00000000..8ed8b7e1 --- /dev/null +++ b/catbuffer-schemas/schemas/metadata/account_metadata.cats @@ -0,0 +1,36 @@ +import "transaction.cats" + +# binary layout for an account metadata transaction +struct AccountMetadataTransactionBody + # metadata target address + targetAddress = UnresolvedAddress + + # metadata key scoped to source, target and type + scopedMetadataKey = uint64 + + # change in value size in bytes + valueSizeDelta = int16 + + # value size in bytes + valueSize = uint16 + + # difference between existing value and new value + # \note when there is no existing value, new value is same this value 
+ # \note when there is an existing value, new value is calculated as xor(previous-value, value) + value = array(byte, valueSize) + +# binary layout for a non-embedded account metadata transaction +struct AccountMetadataTransaction + const uint8 version = 1 + const EntityType entityType = 0x4144 + + inline Transaction + inline AccountMetadataTransactionBody + +# binary layout for an embedded account metadata transaction +struct EmbeddedAccountMetadataTransaction + const uint8 version = 1 + const EntityType entityType = 0x4144 + + inline EmbeddedTransaction + inline AccountMetadataTransactionBody diff --git a/catbuffer-schemas/schemas/metadata/mosaic_metadata.cats b/catbuffer-schemas/schemas/metadata/mosaic_metadata.cats new file mode 100644 index 00000000..8b0481a3 --- /dev/null +++ b/catbuffer-schemas/schemas/metadata/mosaic_metadata.cats @@ -0,0 +1,39 @@ +import "transaction.cats" + +# binary layout for a mosaic metadata transaction +struct MosaicMetadataTransactionBody + # metadata target address + targetAddress = UnresolvedAddress + + # metadata key scoped to source, target and type + scopedMetadataKey = uint64 + + # target mosaic identifier + targetMosaicId = UnresolvedMosaicId + + # change in value size in bytes + valueSizeDelta = int16 + + # value size in bytes + valueSize = uint16 + + # difference between existing value and new value + # \note when there is no existing value, new value is same this value + # \note when there is an existing value, new value is calculated as xor(previous-value, value) + value = array(byte, valueSize) + +# binary layout for a non-embedded mosaic metadata transaction +struct MosaicMetadataTransaction + const uint8 version = 1 + const EntityType entityType = 0x4244 + + inline Transaction + inline MosaicMetadataTransactionBody + +# binary layout for an embedded mosaic metadata transaction +struct EmbeddedMosaicMetadataTransaction + const uint8 version = 1 + const EntityType entityType = 0x4244 + + inline EmbeddedTransaction + 
inline MosaicMetadataTransactionBody diff --git a/catbuffer-schemas/schemas/metadata/namespace_metadata.cats b/catbuffer-schemas/schemas/metadata/namespace_metadata.cats new file mode 100644 index 00000000..01f0ac0e --- /dev/null +++ b/catbuffer-schemas/schemas/metadata/namespace_metadata.cats @@ -0,0 +1,40 @@ +import "namespace/namespace_types.cats" +import "transaction.cats" + +# binary layout for a namespace metadata transaction +struct NamespaceMetadataTransactionBody + # metadata target address + targetAddress = UnresolvedAddress + + # metadata key scoped to source, target and type + scopedMetadataKey = uint64 + + # target namespace identifier + targetNamespaceId = NamespaceId + + # change in value size in bytes + valueSizeDelta = int16 + + # value size in bytes + valueSize = uint16 + + # difference between existing value and new value + # \note when there is no existing value, new value is same this value + # \note when there is an existing value, new value is calculated as xor(previous-value, value) + value = array(byte, valueSize) + +# binary layout for a non-embedded namespace metadata transaction +struct NamespaceMetadataTransaction + const uint8 version = 1 + const EntityType entityType = 0x4344 + + inline Transaction + inline NamespaceMetadataTransactionBody + +# binary layout for an embedded namespace metadata transaction +struct EmbeddedNamespaceMetadataTransaction + const uint8 version = 1 + const EntityType entityType = 0x4344 + + inline EmbeddedTransaction + inline NamespaceMetadataTransactionBody diff --git a/catbuffer-schemas/schemas/mosaic/mosaic_definition.cats b/catbuffer-schemas/schemas/mosaic/mosaic_definition.cats new file mode 100644 index 00000000..33e0fcdf --- /dev/null +++ b/catbuffer-schemas/schemas/mosaic/mosaic_definition.cats @@ -0,0 +1,36 @@ +import "mosaic/mosaic_types.cats" +import "transaction.cats" + +# binary layout for a mosaic definition transaction +struct MosaicDefinitionTransactionBody + # mosaic identifier + id = 
MosaicId + + # mosaic duration + duration = BlockDuration + + # mosaic nonce + nonce = MosaicNonce + + # mosaic flags + flags = MosaicFlags + + # mosaic divisibility + divisibility = uint8 + +# binary layout for a non-embedded mosaic definition transaction +struct MosaicDefinitionTransaction + const uint8 version = 1 + const EntityType entityType = 0x414D + + inline Transaction + inline MosaicDefinitionTransactionBody + +# binary layout for an embedded mosaic definition transaction +struct EmbeddedMosaicDefinitionTransaction + const uint8 version = 1 + const EntityType entityType = 0x414D + + inline EmbeddedTransaction + inline MosaicDefinitionTransactionBody + diff --git a/catbuffer-schemas/schemas/mosaic/mosaic_supply_change.cats b/catbuffer-schemas/schemas/mosaic/mosaic_supply_change.cats new file mode 100644 index 00000000..a5641316 --- /dev/null +++ b/catbuffer-schemas/schemas/mosaic/mosaic_supply_change.cats @@ -0,0 +1,29 @@ +import "mosaic/mosaic_types.cats" +import "transaction.cats" + +# binary layout for a mosaic supply change transaction +struct MosaicSupplyChangeTransactionBody + # affected mosaic identifier + mosaicId = UnresolvedMosaicId + + # change amount + delta = Amount + + # supply change action + action = MosaicSupplyChangeAction + +# binary layout for a non-embedded mosaic supply change transaction +struct MosaicSupplyChangeTransaction + const uint8 version = 1 + const EntityType entityType = 0x424D + + inline Transaction + inline MosaicSupplyChangeTransactionBody + +# binary layout for an embedded mosaic supply change transaction +struct EmbeddedMosaicSupplyChangeTransaction + const uint8 version = 1 + const EntityType entityType = 0x424D + + inline EmbeddedTransaction + inline MosaicSupplyChangeTransactionBody diff --git a/catbuffer-schemas/schemas/mosaic/mosaic_types.cats b/catbuffer-schemas/schemas/mosaic/mosaic_types.cats new file mode 100644 index 00000000..ffedb41c --- /dev/null +++ b/catbuffer-schemas/schemas/mosaic/mosaic_types.cats @@ 
-0,0 +1,24 @@ +using MosaicNonce = uint32 + +# enumeration of mosaic property flags +enum MosaicFlags : uint8 + # no flags present + none = 0x00 + + # mosaic supports supply changes even when mosaic owner owns partial supply + supplyMutable = 0x01 + + # mosaic supports transfers between arbitrary accounts + # \note when not set, mosaic can only be transferred to and from mosaic owner + transferable = 0x02 + + # mosaic supports custom restrictions configured by mosaic owner + restrictable = 0x04 + +# enumeration of mosaic supply change actions +enum MosaicSupplyChangeAction : uint8 + # decreases the supply + decrease = 0x00 + + # increases the supply + increase = 0x01 diff --git a/catbuffer-schemas/schemas/multisig/multisig_account_modification.cats b/catbuffer-schemas/schemas/multisig/multisig_account_modification.cats new file mode 100644 index 00000000..d0dcb30d --- /dev/null +++ b/catbuffer-schemas/schemas/multisig/multisig_account_modification.cats @@ -0,0 +1,40 @@ +import "transaction.cats" + +# binary layout for a multisig account modification transaction +struct MultisigAccountModificationTransactionBody + # relative change of the minimal number of cosignatories required when removing an account + minRemovalDelta = int8 + + # relative change of the minimal number of cosignatories required when approving a transaction + minApprovalDelta = int8 + + # number of cosignatory address additions + addressAdditionsCount = uint8 + + # number of cosignatory address deletions + addressDeletionsCount = uint8 + + # reserved padding to align addressAdditions on 8-byte boundary + multisigAccountModificationTransactionBody_Reserved1 = uint32 + + # cosignatory address additions + addressAdditions = array(UnresolvedAddress, addressAdditionsCount) + + # cosignatory address deletions + addressDeletions = array(UnresolvedAddress, addressDeletionsCount) + +# binary layout for a non-embedded multisig account modification transaction +struct MultisigAccountModificationTransaction + 
const uint8 version = 1 + const EntityType entityType = 0x4155 + + inline Transaction + inline MultisigAccountModificationTransactionBody + +# binary layout for an embedded multisig account modification transaction +struct EmbeddedMultisigAccountModificationTransaction + const uint8 version = 1 + const EntityType entityType = 0x4155 + + inline EmbeddedTransaction + inline MultisigAccountModificationTransactionBody diff --git a/catbuffer-schemas/schemas/namespace/address_alias.cats b/catbuffer-schemas/schemas/namespace/address_alias.cats new file mode 100644 index 00000000..27ec182e --- /dev/null +++ b/catbuffer-schemas/schemas/namespace/address_alias.cats @@ -0,0 +1,29 @@ +import "namespace/namespace_types.cats" +import "transaction.cats" + +# binary layout for an address alias transaction +struct AddressAliasTransactionBody + # identifier of the namespace that will become an alias + namespaceId = NamespaceId + + # aliased address + address = Address + + # alias action + aliasAction = AliasAction + +# binary layout for a non-embedded address alias transaction +struct AddressAliasTransaction + const uint8 version = 1 + const EntityType entityType = 0x424E + + inline Transaction + inline AddressAliasTransactionBody + +# binary layout for an embedded address alias transaction +struct EmbeddedAddressAliasTransaction + const uint8 version = 1 + const EntityType entityType = 0x424E + + inline EmbeddedTransaction + inline AddressAliasTransactionBody diff --git a/catbuffer-schemas/schemas/namespace/mosaic_alias.cats b/catbuffer-schemas/schemas/namespace/mosaic_alias.cats new file mode 100644 index 00000000..aec0b970 --- /dev/null +++ b/catbuffer-schemas/schemas/namespace/mosaic_alias.cats @@ -0,0 +1,29 @@ +import "namespace/namespace_types.cats" +import "transaction.cats" + +# binary layout for an mosaic alias transaction +struct MosaicAliasTransactionBody + # identifier of the namespace that will become an alias + namespaceId = NamespaceId + + # aliased mosaic identifier 
+ mosaicId = MosaicId + + # alias action + aliasAction = AliasAction + +# binary layout for a non-embedded mosaic alias transaction +struct MosaicAliasTransaction + const uint8 version = 1 + const EntityType entityType = 0x434E + + inline Transaction + inline MosaicAliasTransactionBody + +# binary layout for an embedded mosaic alias transaction +struct EmbeddedMosaicAliasTransaction + const uint8 version = 1 + const EntityType entityType = 0x434E + + inline EmbeddedTransaction + inline MosaicAliasTransactionBody diff --git a/catbuffer-schemas/schemas/namespace/namespace_receipts.cats b/catbuffer-schemas/schemas/namespace/namespace_receipts.cats new file mode 100644 index 00000000..e5623773 --- /dev/null +++ b/catbuffer-schemas/schemas/namespace/namespace_receipts.cats @@ -0,0 +1,9 @@ +import "namespace/namespace_types.cats" +import "receipts.cats" + +# binary layout for a namespace expiry receipt +struct NamespaceExpiryReceipt + inline Receipt + + # expiring namespace id + artifactId = NamespaceId diff --git a/catbuffer-schemas/schemas/namespace/namespace_registration.cats b/catbuffer-schemas/schemas/namespace/namespace_registration.cats new file mode 100644 index 00000000..e658e0e9 --- /dev/null +++ b/catbuffer-schemas/schemas/namespace/namespace_registration.cats @@ -0,0 +1,38 @@ +import "namespace/namespace_types.cats" +import "transaction.cats" + +# binary layout for a namespace registration transaction +struct NamespaceRegistrationTransactionBody + # namespace duration + duration = BlockDuration if registrationType equals root + + # parent namespace identifier + parentId = NamespaceId if registrationType equals child + + # namespace identifier + id = NamespaceId + + # namespace registration type + registrationType = NamespaceRegistrationType + + # namespace name size + nameSize = uint8 + + # namespace name + name = array(byte, nameSize) + +# binary layout for a non-embedded namespace registration transaction +struct NamespaceRegistrationTransaction + const 
uint8 version = 1 + const EntityType entityType = 0x414E + + inline Transaction + inline NamespaceRegistrationTransactionBody + +# binary layout for an embedded namespace registration transaction +struct EmbeddedNamespaceRegistrationTransaction + const uint8 version = 1 + const EntityType entityType = 0x414E + + inline EmbeddedTransaction + inline NamespaceRegistrationTransactionBody diff --git a/catbuffer-schemas/schemas/namespace/namespace_types.cats b/catbuffer-schemas/schemas/namespace/namespace_types.cats new file mode 100644 index 00000000..975dd722 --- /dev/null +++ b/catbuffer-schemas/schemas/namespace/namespace_types.cats @@ -0,0 +1,17 @@ +using NamespaceId = uint64 + +# enumeration of namespace registration types +enum NamespaceRegistrationType : uint8 + # root namespace + root = 0x00 + + # child namespace + child = 0x01 + +# enumeration of alias actions +enum AliasAction : uint8 + # unlink alias + unlink = 0x00 + + # link alias + link = 0x01 diff --git a/catbuffer-schemas/schemas/receipts.cats b/catbuffer-schemas/schemas/receipts.cats new file mode 100644 index 00000000..b8d981f2 --- /dev/null +++ b/catbuffer-schemas/schemas/receipts.cats @@ -0,0 +1,101 @@ +import "entity.cats" + +# enumeration of receipt types +enum ReceiptType : uint16 + # reserved receipt type + reserved = 0x0000 + + # mosaic rental fee receipt type + mosaic_rental_fee = 0x124D + + # namespace rental fee receipt type + namespace_rental_fee = 0x134E + + # harvest fee receipt type + harvest_fee = 0x2143 + + # lock hash completed receipt type + lockHash_completed = 0x2248 + + # lock hash expired receipt type + lockHash_expired = 0x2348 + + # lock secret completed receipt type + lockSecret_completed = 0x2252 + + # lock secret expired receipt type + lockSecret_expired = 0x2352 + + # lock hash created receipt type + lockHash_created = 0x3148 + + # lock secret created receipt type + lockSecret_created = 0x3152 + + # mosaic expired receipt type + mosaic_expired = 0x414D + + # namespace 
expired receipt type + namespace_expired = 0x414E + + # namespace deleted receipt type + namespace_deleted = 0x424E + + # inflation receipt type + inflation = 0x5143 + + # transaction group receipt type + transaction_group = 0xE143 + + # address alias resolution receipt type + address_alias_resolution = 0xF143 + + # mosaic alias resolution receipt type + mosaic_alias_resolution = 0xF243 + +# binary layout for a receipt entity +struct Receipt + inline SizePrefixedEntity + + # receipt version + version = uint16 + + # receipt type + type = ReceiptType + +# binary layout for a balance transfer receipt +struct BalanceTransferReceipt + inline Receipt + + # mosaic + mosaic = Mosaic + + # mosaic sender address + senderAddress = Address + + # mosaic recipient address + recipientAddress = Address + +# binary layout for a balance change receipt +struct BalanceChangeReceipt + inline Receipt + + # mosaic + mosaic = Mosaic + + # account address + targetAddress = Address + +# binary layout for an inflation receipt +struct InflationReceipt + inline Receipt + + # mosaic + mosaic = Mosaic + +# binary layout for a mosaic expiry receipt +struct MosaicExpiryReceipt + inline Receipt + + # expiring mosaic id + artifactId = MosaicId diff --git a/catbuffer-schemas/schemas/resolution_statement/resolution_statement_types.cats b/catbuffer-schemas/schemas/resolution_statement/resolution_statement_types.cats new file mode 100644 index 00000000..ea0a9a56 --- /dev/null +++ b/catbuffer-schemas/schemas/resolution_statement/resolution_statement_types.cats @@ -0,0 +1,23 @@ +# binary layout for receipt source +struct ReceiptSource + # transaction primary source (e.g. index within block) + primaryId = uint32 + + # transaction secondary source (e.g. 
index within aggregate) + secondaryId = uint32 + +# binary layout for address resolution entry +struct AddressResolutionEntry + # source of resolution within block + source = ReceiptSource + + # resolved value + resolved = Address + +# binary layout for mosaic resolution entry +struct MosaicResolutionEntry + # source of resolution within block + source = ReceiptSource + + # resolved value + resolved = MosaicId diff --git a/catbuffer-schemas/schemas/resolution_statement/resolution_statements.cats b/catbuffer-schemas/schemas/resolution_statement/resolution_statements.cats new file mode 100644 index 00000000..f684277e --- /dev/null +++ b/catbuffer-schemas/schemas/resolution_statement/resolution_statements.cats @@ -0,0 +1,22 @@ +import "receipts.cats" +import "resolution_statement/resolution_statement_types.cats" + +# binary layout for a mosaic resolution statement +struct MosaicResolutionStatement + inline Receipt + + # unresolved mosaic + unresolved = UnresolvedMosaicId + + # resolution entries + resolutionEntries = array(MosaicResolutionEntry, __FILL__) + +# binary layout for an address resolution statement +struct AddressResolutionStatement + inline Receipt + + # unresolved address + unresolved = UnresolvedAddress + + # resolution entries + resolutionEntries = array(AddressResolutionEntry, __FILL__) diff --git a/catbuffer-schemas/schemas/restriction_account/account_address_restriction.cats b/catbuffer-schemas/schemas/restriction_account/account_address_restriction.cats new file mode 100644 index 00000000..4a5b4c1e --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_account/account_address_restriction.cats @@ -0,0 +1,38 @@ +import "restriction_account/restriction_account_types.cats" +import "transaction.cats" + +# binary layout for an account address restriction transaction +struct AccountAddressRestrictionTransactionBody + # account restriction flags + restrictionFlags = AccountRestrictionFlags + + # number of account restriction additions + 
restrictionAdditionsCount = uint8 + + # number of account restriction deletions + restrictionDeletionsCount = uint8 + + # reserved padding to align restrictionAdditions on 8-byte boundary + accountRestrictionTransactionBody_Reserved1 = uint32 + + # account restriction additions + restrictionAdditions = array(UnresolvedAddress, restrictionAdditionsCount) + + # account restriction deletions + restrictionDeletions = array(UnresolvedAddress, restrictionDeletionsCount) + +# binary layout for a non-embedded account address restriction transaction +struct AccountAddressRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4150 + + inline Transaction + inline AccountAddressRestrictionTransactionBody + +# binary layout for an embedded account address restriction transaction +struct EmbeddedAccountAddressRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4150 + + inline EmbeddedTransaction + inline AccountAddressRestrictionTransactionBody diff --git a/catbuffer-schemas/schemas/restriction_account/account_mosaic_restriction.cats b/catbuffer-schemas/schemas/restriction_account/account_mosaic_restriction.cats new file mode 100644 index 00000000..fc71e08b --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_account/account_mosaic_restriction.cats @@ -0,0 +1,38 @@ +import "restriction_account/restriction_account_types.cats" +import "transaction.cats" + +# binary layout for an account mosaic restriction transaction +struct AccountMosaicRestrictionTransactionBody + # account restriction flags + restrictionFlags = AccountRestrictionFlags + + # number of account restriction additions + restrictionAdditionsCount = uint8 + + # number of account restriction deletions + restrictionDeletionsCount = uint8 + + # reserved padding to align restrictionAdditions on 8-byte boundary + accountRestrictionTransactionBody_Reserved1 = uint32 + + # account restriction additions + restrictionAdditions = array(UnresolvedMosaicId, 
restrictionAdditionsCount) + + # account restriction deletions + restrictionDeletions = array(UnresolvedMosaicId, restrictionDeletionsCount) + +# binary layout for a non-embedded account mosaic restriction transaction +struct AccountMosaicRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4250 + + inline Transaction + inline AccountMosaicRestrictionTransactionBody + +# binary layout for an embedded account mosaic restriction transaction +struct EmbeddedAccountMosaicRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4250 + + inline EmbeddedTransaction + inline AccountMosaicRestrictionTransactionBody diff --git a/catbuffer-schemas/schemas/restriction_account/account_operation_restriction.cats b/catbuffer-schemas/schemas/restriction_account/account_operation_restriction.cats new file mode 100644 index 00000000..637a0d6e --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_account/account_operation_restriction.cats @@ -0,0 +1,38 @@ +import "restriction_account/restriction_account_types.cats" +import "transaction.cats" + +# binary layout for an account operation restriction transaction +struct AccountOperationRestrictionTransactionBody + # account restriction flags + restrictionFlags = AccountRestrictionFlags + + # number of account restriction additions + restrictionAdditionsCount = uint8 + + # number of account restriction deletions + restrictionDeletionsCount = uint8 + + # reserved padding to align restrictionAdditions on 8-byte boundary + accountRestrictionTransactionBody_Reserved1 = uint32 + + # account restriction additions + restrictionAdditions = array(EntityType, restrictionAdditionsCount) + + # account restriction deletions + restrictionDeletions = array(EntityType, restrictionDeletionsCount) + +# binary layout for a non-embedded account operation restriction transaction +struct AccountOperationRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4350 + + inline 
Transaction + inline AccountOperationRestrictionTransactionBody + +# binary layout for an embedded account operation restriction transaction +struct EmbeddedAccountOperationRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4350 + + inline EmbeddedTransaction + inline AccountOperationRestrictionTransactionBody diff --git a/catbuffer-schemas/schemas/restriction_account/restriction_account_types.cats b/catbuffer-schemas/schemas/restriction_account/restriction_account_types.cats new file mode 100644 index 00000000..b2a7cea4 --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_account/restriction_account_types.cats @@ -0,0 +1,18 @@ +import "entity.cats" + +# enumeration of account restriction flags +enum AccountRestrictionFlags : uint16 + # restriction type is an address + address = 0x0001 + + # restriction type is a mosaic identifier + mosaicId = 0x0002 + + # restriction type is a transaction type + transactionType = 0x0004 + + # restriction is interpreted as outgoing + outgoing = 0x4000 + + # restriction is interpreted as blocking (instead of allowing) operation + block = 0x8000 diff --git a/catbuffer-schemas/schemas/restriction_mosaic/mosaic_address_restriction.cats b/catbuffer-schemas/schemas/restriction_mosaic/mosaic_address_restriction.cats new file mode 100644 index 00000000..feeb1b7f --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_mosaic/mosaic_address_restriction.cats @@ -0,0 +1,34 @@ +import "transaction.cats" + +# binary layout for a mosaic address restriction transaction +struct MosaicAddressRestrictionTransactionBody + # identifier of the mosaic to which the restriction applies + mosaicId = UnresolvedMosaicId + + # restriction key + restrictionKey = uint64 + + # previous restriction value + previousRestrictionValue = uint64 + + # new restriction value + newRestrictionValue = uint64 + + # address being restricted + targetAddress = UnresolvedAddress + +# binary layout for a non-embedded mosaic address 
restriction transaction +struct MosaicAddressRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4251 + + inline Transaction + inline MosaicAddressRestrictionTransactionBody + +# binary layout for an embedded mosaic address restriction transaction +struct EmbeddedMosaicAddressRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4251 + + inline EmbeddedTransaction + inline MosaicAddressRestrictionTransactionBody diff --git a/catbuffer-schemas/schemas/restriction_mosaic/mosaic_global_restriction.cats b/catbuffer-schemas/schemas/restriction_mosaic/mosaic_global_restriction.cats new file mode 100644 index 00000000..8f788850 --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_mosaic/mosaic_global_restriction.cats @@ -0,0 +1,41 @@ +import "restriction_mosaic/restriction_mosaic_types.cats" +import "transaction.cats" + +# binary layout for a mosaic global restriction transaction +struct MosaicGlobalRestrictionTransactionBody + # identifier of the mosaic being restricted + mosaicId = UnresolvedMosaicId + + # identifier of the mosaic providing the restriction key + referenceMosaicId = UnresolvedMosaicId + + # restriction key relative to the reference mosaic identifier + restrictionKey = uint64 + + # previous restriction value + previousRestrictionValue = uint64 + + # new restriction value + newRestrictionValue = uint64 + + # previous restriction type + previousRestrictionType = MosaicRestrictionType + + # new restriction type + newRestrictionType = MosaicRestrictionType + +# binary layout for a non-embedded mosaic global restriction transaction +struct MosaicGlobalRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4151 + + inline Transaction + inline MosaicGlobalRestrictionTransactionBody + +# binary layout for an embedded mosaic global restriction transaction +struct EmbeddedMosaicGlobalRestrictionTransaction + const uint8 version = 1 + const EntityType entityType = 0x4151 + + 
inline EmbeddedTransaction + inline MosaicGlobalRestrictionTransactionBody diff --git a/catbuffer-schemas/schemas/restriction_mosaic/restriction_mosaic_types.cats b/catbuffer-schemas/schemas/restriction_mosaic/restriction_mosaic_types.cats new file mode 100644 index 00000000..92dbd20b --- /dev/null +++ b/catbuffer-schemas/schemas/restriction_mosaic/restriction_mosaic_types.cats @@ -0,0 +1,24 @@ +using MosaicRestrictionKey = uint64 + +# enumeration of mosaic restriction types +enum MosaicRestrictionType : uint8 + # uninitialized value indicating no restriction + none = 0x00 + + # allow if equal + eq = 0x01 + + # allow if not equal + ne = 0x02 + + # allow if less than + lt = 0x03 + + # allow if less than or equal + le = 0x04 + + # allow if greater than + gt = 0x05 + + # allow if greater than or equal + ge = 0x06 diff --git a/catbuffer-schemas/schemas/state/account_state.cats b/catbuffer-schemas/schemas/state/account_state.cats new file mode 100644 index 00000000..58e47db3 --- /dev/null +++ b/catbuffer-schemas/schemas/state/account_state.cats @@ -0,0 +1,59 @@ +import "state/account_state_types.cats" +import "state/state_header.cats" + +# account activity buckets +struct HeightActivityBuckets + # account activity buckets + buckets = array(HeightActivityBucket, 5) + +# binary layout for non-historical account state +struct AccountState + inline StateHeader + + # address of account + address = Address + + # height at which address has been obtained + addressHeight = Height + + # public key of account + publicKey = Key + + # height at which public key has been obtained + publicKeyHeight = Height + + # type of account + accountType = AccountType + + # account format + format = AccountStateFormat + + # mask of supplemental public key flags + supplementalPublicKeysMask = AccountKeyTypeFlags + + # number of voting public keys + votingPublicKeysCount = uint8 + + # linked account public key + linkedPublicKey = Key if supplementalPublicKeysMask has linked + + # node public key + 
nodePublicKey = Key if supplementalPublicKeysMask has node + + # vrf public key + vrfPublicKey = Key if supplementalPublicKeysMask has vrf + + # voting public keys + votingPublicKeys = array(PinnedVotingKey, votingPublicKeysCount) + + # current importance snapshot of the account + importanceSnapshots = ImportanceSnapshot if format equals highValue + + # activity buckets of the account + activityBuckets = HeightActivityBuckets if format equals highValue + + # number of total balances (mosaics) + balancesCount = uint16 + + # balances of account + balances = array(Mosaic, balancesCount) diff --git a/catbuffer-schemas/schemas/state/account_state_types.cats b/catbuffer-schemas/schemas/state/account_state_types.cats new file mode 100644 index 00000000..36936465 --- /dev/null +++ b/catbuffer-schemas/schemas/state/account_state_types.cats @@ -0,0 +1,72 @@ +import "types.cats" + +# enumeration of account types +enum AccountType : uint8 + # account is not linked to another account + unlinked = 0 + + # account is a balance-holding account that is linked to a remote harvester account + main = 1 + + # account is a remote harvester account that is linked to a balance-holding account + remote = 2 + + # account is a remote harvester eligible account that is unlinked + # \note this allows an account that has previously been used as remote to be reused as a remote + remoteUnlinked = 3 + +# enumeration of account key type flags +enum AccountKeyTypeFlags : uint8 + # unset key + unset = 0x00 + + # linked account public key + # \note this can be either a remote or main account public key depending on context + linked = 0x01 + + # node public key on which remote is allowed to harvest + node = 0x02 + + # VRF public key + vrf = 0x04 + +# enumeration of account state formats +enum AccountStateFormat : uint8 + # regular account + regular = 0 + + # high value account eligible to harvest + highValue = 1 + +# pinned voting key +struct PinnedVotingKey + # voting key + votingKey = VotingKey + + # 
start finalization epoch + startEpoch = FinalizationEpoch + + # end finalization epoch + endEpoch = FinalizationEpoch + +# temporal importance information +struct ImportanceSnapshot + # account importance + importance = Importance + + # importance height + height = ImportanceHeight + +# account activity bucket +struct HeightActivityBucket + # activity start height + startHeight = ImportanceHeight + + # total fees paid by account + totalFeesPaid = Amount + + # number of times account has been used as a beneficiary + beneficiaryCount = uint32 + + # raw importance score + rawScore = uint64 diff --git a/catbuffer-schemas/schemas/state/hash_lock.cats b/catbuffer-schemas/schemas/state/hash_lock.cats new file mode 100644 index 00000000..cebe45c7 --- /dev/null +++ b/catbuffer-schemas/schemas/state/hash_lock.cats @@ -0,0 +1,21 @@ +import "state/lock_info.cats" +import "state/state_header.cats" + +# binary layout for hash lock transaction info +struct HashLockInfo + inline StateHeader + + # owner address + ownerAddress = Address + + # mosaic associated with lock + mosaic = Mosaic + + # height at which the lock expires + endHeight = Height + + # flag indicating whether or not the lock was already used + status = LockStatus + + # hash + hash = Hash256 diff --git a/catbuffer-schemas/schemas/state/lock_info.cats b/catbuffer-schemas/schemas/state/lock_info.cats new file mode 100644 index 00000000..851ee7d4 --- /dev/null +++ b/catbuffer-schemas/schemas/state/lock_info.cats @@ -0,0 +1,9 @@ +import "types.cats" + +# lock status for lock transaction +enum LockStatus : uint8 + # lock is unused + unused = 0 + + # lock was already used + used = 1 diff --git a/catbuffer-schemas/schemas/state/metadata_entry.cats b/catbuffer-schemas/schemas/state/metadata_entry.cats new file mode 100644 index 00000000..fffe39d9 --- /dev/null +++ b/catbuffer-schemas/schemas/state/metadata_entry.cats @@ -0,0 +1,25 @@ +import "state/metadata_entry_types.cats" +import "state/state_header.cats" +import 
"namespace/namespace_types.cats" + +# binary layout of a metadata entry +struct MetadataEntry + inline StateHeader + + # metadata source address (provider) + sourceAddress = Address + + # metadata target address + targetAddress = Address + + # metadata key scoped to source, target and type + scopedMetadataKey = ScopedMetadataKey + + # target id + targetId = uint64 + + # metadata type + metadataType = MetadataType + + # value + value = MetadataValue diff --git a/catbuffer-schemas/schemas/state/metadata_entry_types.cats b/catbuffer-schemas/schemas/state/metadata_entry_types.cats new file mode 100644 index 00000000..10c4e310 --- /dev/null +++ b/catbuffer-schemas/schemas/state/metadata_entry_types.cats @@ -0,0 +1,22 @@ +import "types.cats" + +using ScopedMetadataKey = uint64 + +# enum for the different types of metadata +enum MetadataType : uint8 + # account metadata + account = 0 + + # mosaic metadata + mosaic = 1 + + # namespace metadata + namespace = 2 + +# binary layout of a metadata entry value +struct MetadataValue + # size of the value + size = uint16 + + # data of the value + data = array(byte, size) diff --git a/catbuffer-schemas/schemas/state/mosaic_entry.cats b/catbuffer-schemas/schemas/state/mosaic_entry.cats new file mode 100644 index 00000000..ad5d7999 --- /dev/null +++ b/catbuffer-schemas/schemas/state/mosaic_entry.cats @@ -0,0 +1,15 @@ +import "state/mosaic_entry_types.cats" +import "state/state_header.cats" + +# binary layout for mosaic entry +struct MosaicEntry + inline StateHeader + + # entry id + mosaicId = MosaicId + + # total supply amount + supply = Amount + + # definition comprised of entry properties + definition = MosaicDefinition diff --git a/catbuffer-schemas/schemas/state/mosaic_entry_types.cats b/catbuffer-schemas/schemas/state/mosaic_entry_types.cats new file mode 100644 index 00000000..c7143423 --- /dev/null +++ b/catbuffer-schemas/schemas/state/mosaic_entry_types.cats @@ -0,0 +1,27 @@ +import "types.cats" +import 
"mosaic/mosaic_types.cats" + +# binary layout for mosaic properties +struct MosaicProperties + # mosaic flags + flags = MosaicFlags + + # mosaic divisibility + divisibility = uint8 + + # mosaic duration + duration = BlockDuration + +# binary layout for mosaic definition +struct MosaicDefinition + # block height + startHeight = Height + + # mosaic owner + ownerAddress = Address + + # revision + revision = uint32 + + # properties + properties = MosaicProperties diff --git a/catbuffer-schemas/schemas/state/multisig_entry.cats b/catbuffer-schemas/schemas/state/multisig_entry.cats new file mode 100644 index 00000000..5ae1dc61 --- /dev/null +++ b/catbuffer-schemas/schemas/state/multisig_entry.cats @@ -0,0 +1,27 @@ +import "types.cats" +import "state/state_header.cats" + +# binary layout for a multisig entry +struct MultisigEntry + inline StateHeader + + # minimum approval for modifications + minApproval = uint32 + + # minimum approval for removal + minRemoval = uint32 + + # account address + accountAddress = Address + + # number of cosignatories + cosignatoryAddressesCount = uint64 + + # cosignatories for account + cosignatoryAddresses = array(Address, cosignatoryAddressesCount) + + # number of other accounts for which the entry is cosignatory + multisigAddressesCount = uint64 + + # accounts for which the entry is cosignatory + multisigAddresses = array(Address, multisigAddressesCount) diff --git a/catbuffer-schemas/schemas/state/namespace_history.cats b/catbuffer-schemas/schemas/state/namespace_history.cats new file mode 100644 index 00000000..da7f2eed --- /dev/null +++ b/catbuffer-schemas/schemas/state/namespace_history.cats @@ -0,0 +1,24 @@ +import "state/namespace_history_types.cats" +import "state/state_header.cats" + +# binary layout for non-historical root namespace history +struct RootNamespaceHistory + inline StateHeader + + # id of the root namespace history + id = NamespaceId + + # namespace owner address + ownerAddress = Address + + # lifetime in blocks + 
lifetime = NamespaceLifetime + + # root namespace alias + rootAlias = NamespaceAlias + + # number of children + childrenCount = uint64 + + # save child sub-namespace paths + paths = array(NamespacePath, childrenCount, sort_key=path) diff --git a/catbuffer-schemas/schemas/state/namespace_history_types.cats b/catbuffer-schemas/schemas/state/namespace_history_types.cats new file mode 100644 index 00000000..f166a729 --- /dev/null +++ b/catbuffer-schemas/schemas/state/namespace_history_types.cats @@ -0,0 +1,43 @@ +import "namespace/namespace_types.cats" +import "types.cats" + +# binary layout for namespace lifetime +struct NamespaceLifetime + # start height + lifetimeStart = Height + + # end height + lifetimeEnd = Height + +# namespace alias type +enum NamespaceAliasType : uint8 + # no alias + none = 0 + + # if alias is mosaicId + mosaicId = 1 + + # if alias is address + address = 2 + +# binary layout for alias +struct NamespaceAlias + # namespace alias type + namespaceAliasType = NamespaceAliasType + + # mosaic alias + mosaicAlias = MosaicId if namespaceAliasType equals mosaicId + + # address alias + addressAlias = Address if namespaceAliasType equals address + +# binary layout for a namespace path +struct NamespacePath + # number of paths (excluding root id) + pathSize = uint8 + + # namespace path (excluding root id) + path = array(NamespaceId, pathSize) + + # namespace alias + alias = NamespaceAlias diff --git a/catbuffer-schemas/schemas/state/restriction_account.cats b/catbuffer-schemas/schemas/state/restriction_account.cats new file mode 100644 index 00000000..b8b65043 --- /dev/null +++ b/catbuffer-schemas/schemas/state/restriction_account.cats @@ -0,0 +1,16 @@ +import "restriction_account/restriction_account_types.cats" +import "state/restriction_account_types.cats" +import "state/state_header.cats" + +# binary layout for account restrictions +struct AccountRestrictions + inline StateHeader + + # address on which restrictions are placed + address = Address + + # 
number of restrictions + restrictionsCount = uint64 + + # account restrictions + restrictions = array(AccountRestrictionsInfo, restrictionsCount) diff --git a/catbuffer-schemas/schemas/state/restriction_account_types.cats b/catbuffer-schemas/schemas/state/restriction_account_types.cats new file mode 100644 index 00000000..931a4325 --- /dev/null +++ b/catbuffer-schemas/schemas/state/restriction_account_types.cats @@ -0,0 +1,39 @@ +import "entity.cats" + +# binary layout for address based account restriction +struct AccountRestrictionAddressValue + # number of restrictions for a particular account + restrictionValuesCount = uint64 + + # restriction values + restrictionValues = array(Address, restrictionValuesCount) + +# binary layout for mosaic id based account restriction +struct AccountRestrictionMosaicValue + # number of restrictions for a particular account + restrictionValuesCount = uint64 + + # restriction values + restrictionValues = array(MosaicId, restrictionValuesCount) + +# binary layout for transaction type based account restriction +struct AccountRestrictionTransactionTypeValue + # number of restrictions for a particular account + restrictionValuesCount = uint64 + + # restriction values + restrictionValues = array(EntityType, restrictionValuesCount) + +# binary layout for account restrictions +struct AccountRestrictionsInfo + # raw restriction flags + restrictionFlags = AccountRestrictionFlags + + # address restrictions + addressRestrictions = AccountRestrictionAddressValue if restrictionFlags has address + + # mosaic identifier restrictions + mosaicIdRestrictions = AccountRestrictionMosaicValue if restrictionFlags has mosaicId + + # transaction type restrictions + transactionTypeRestrictions = AccountRestrictionTransactionTypeValue if restrictionFlags has transactionType diff --git a/catbuffer-schemas/schemas/state/restriction_mosaic_entry.cats b/catbuffer-schemas/schemas/state/restriction_mosaic_entry.cats new file mode 100644 index 00000000..fba671c2 
--- /dev/null +++ b/catbuffer-schemas/schemas/state/restriction_mosaic_entry.cats @@ -0,0 +1,34 @@ +import "state/restriction_mosaic_types.cats" +import "state/state_header.cats" + +# binary layout for a mosaic restriction +struct MosaicAddressRestrictionEntry + # identifier of the mosaic to which the restriction applies + mosaicId = MosaicId + + # address being restricted + address = Address + + # address key value restriction set + keyPairs = AddressKeyValueSet + +# binary layout for a mosaic restriction +struct MosaicGlobalRestrictionEntry + # identifier of the mosaic to which the restriction applies + mosaicId = MosaicId + + # global key value restriction set + keyPairs = GlobalKeyValueSet + +# binary layout for a mosaic restriction +struct MosaicRestrictionEntry + inline StateHeader + + # type of restriction being placed upon the entity + entryType = MosaicRestrictionEntryType + + # address restriction rule + addressEntry = MosaicAddressRestrictionEntry if entryType equals address + + # global mosaic rule + globalEntry = MosaicGlobalRestrictionEntry if entryType equals global diff --git a/catbuffer-schemas/schemas/state/restriction_mosaic_types.cats b/catbuffer-schemas/schemas/state/restriction_mosaic_types.cats new file mode 100644 index 00000000..a22a1afb --- /dev/null +++ b/catbuffer-schemas/schemas/state/restriction_mosaic_types.cats @@ -0,0 +1,53 @@ +import "types.cats" +import "restriction_mosaic/restriction_mosaic_types.cats" + +# type of mosaic restriction entry +enum MosaicRestrictionEntryType : uint8 + # address restriction + address = 0 + + # global (mosaic) restriction + global = 1 + +# layout for mosaic address restriction key-value pair +struct AddressKeyValue + # key for value + key = MosaicRestrictionKey + + # value associated by key + value = uint64 + +# binary layout for mosaic address restriction key-value set +struct AddressKeyValueSet + # number of key value pairs + keyValueCount = uint8 + + # key value array + keys = 
array(AddressKeyValue, keyValueCount, sort_key=key) + +# binary layout of restriction rule being applied +struct RestrictionRule + # identifier of the mosaic providing the restriction key + referenceMosaicId = MosaicId + + # restriction value + restrictionValue = uint64 + + # restriction type + restrictionType = MosaicRestrictionType + +# binary layout for a global key-value +struct GlobalKeyValue + # key associated with a restriction rule + key = MosaicRestrictionKey + + # restriction rule (the value) associated with a key + restrictionRule = RestrictionRule + +# binary layout for a global restriction key-value set +struct GlobalKeyValueSet + # number of key value pairs + keyValueCount = uint8 + + # key value array + keys = array(GlobalKeyValue, keyValueCount, sort_key=key) diff --git a/catbuffer-schemas/schemas/state/secret_lock.cats b/catbuffer-schemas/schemas/state/secret_lock.cats new file mode 100644 index 00000000..faa9b47c --- /dev/null +++ b/catbuffer-schemas/schemas/state/secret_lock.cats @@ -0,0 +1,28 @@ +import "lock_secret/lock_secret_types.cats" +import "state/lock_info.cats" +import "state/state_header.cats" + +# binary layout for serialized lock transaction +struct SecretLockInfo + inline StateHeader + + # owner address + ownerAddress = Address + + # mosaic associated with lock + mosaic = Mosaic + + # height at which the lock expires + endHeight = Height + + # flag indicating whether or not the lock was already used + status = LockStatus + + # hash algorithm + hashAlgorithm = LockHashAlgorithm + + # transaction secret + secret = Hash256 + + # transaction recipient + recipient = Address diff --git a/catbuffer-schemas/schemas/state/state_header.cats b/catbuffer-schemas/schemas/state/state_header.cats new file mode 100644 index 00000000..4e0a7078 --- /dev/null +++ b/catbuffer-schemas/schemas/state/state_header.cats @@ -0,0 +1,4 @@ +# header common to all serialized states +struct StateHeader + # serialization version + version = uint16 diff --git 
a/catbuffer-schemas/schemas/transaction.cats b/catbuffer-schemas/schemas/transaction.cats new file mode 100644 index 00000000..2018cc5a --- /dev/null +++ b/catbuffer-schemas/schemas/transaction.cats @@ -0,0 +1,25 @@ +import "entity.cats" + +# binary layout for a transaction +struct Transaction + inline SizePrefixedEntity + inline VerifiableEntity + inline EntityBody + + # transaction fee + fee = Amount + + # transaction deadline + deadline = Timestamp + +# binary layout for an embedded transaction header +struct EmbeddedTransactionHeader + inline SizePrefixedEntity + + # reserved padding to align end of EmbeddedTransactionHeader on 8-byte boundary + embeddedTransactionHeader_Reserved1 = uint32 + +# binary layout for an embedded transaction +struct EmbeddedTransaction + inline EmbeddedTransactionHeader + inline EntityBody diff --git a/catbuffer-schemas/schemas/transfer/transfer.cats b/catbuffer-schemas/schemas/transfer/transfer.cats new file mode 100644 index 00000000..defb0180 --- /dev/null +++ b/catbuffer-schemas/schemas/transfer/transfer.cats @@ -0,0 +1,40 @@ +import "transaction.cats" + +# binary layout for a transfer transaction +struct TransferTransactionBody + # recipient address + recipientAddress = UnresolvedAddress + + # size of attached message + messageSize = uint16 + + # number of attached mosaics + mosaicsCount = uint8 + + # reserved padding to align mosaics on 8-byte boundary + transferTransactionBody_Reserved1 = uint32 + + # reserved padding to align mosaics on 8-byte boundary + transferTransactionBody_Reserved2 = uint8 + + # attached mosaics + mosaics = array(UnresolvedMosaic, mosaicsCount, sort_key=mosaicId) + + # attached message + message = array(byte, messageSize) + +# binary layout for a non-embedded transfer transaction +struct TransferTransaction + const uint8 version = 1 + const EntityType entityType = 0x4154 + + inline Transaction + inline TransferTransactionBody + +# binary layout for an embedded transfer transaction +struct 
EmbeddedTransferTransaction + const uint8 version = 1 + const EntityType entityType = 0x4154 + + inline EmbeddedTransaction + inline TransferTransactionBody diff --git a/catbuffer-schemas/schemas/types.cats b/catbuffer-schemas/schemas/types.cats new file mode 100644 index 00000000..78fc8414 --- /dev/null +++ b/catbuffer-schemas/schemas/types.cats @@ -0,0 +1,44 @@ +using Amount = uint64 +using BlockDuration = uint64 +using BlockFeeMultiplier = uint32 +using Difficulty = uint64 +using FinalizationEpoch = uint32 +using FinalizationPoint = uint32 +using Height = uint64 +using Importance = uint64 +using ImportanceHeight = uint64 +using UnresolvedMosaicId = uint64 +using MosaicId = uint64 +using Timestamp = uint64 + +using UnresolvedAddress = binary_fixed(24) +using Address = binary_fixed(24) +using Hash256 = binary_fixed(32) +using Hash512 = binary_fixed(64) +using Key = binary_fixed(32) +using VotingKey = binary_fixed(32) +using Signature = binary_fixed(64) + +# binary layout for a mosaic +struct Mosaic + # mosaic identifier + mosaicId = MosaicId + + # mosaic amount + amount = Amount + +# binary layout for an unresolved mosaic +struct UnresolvedMosaic + # mosaic identifier + mosaicId = UnresolvedMosaicId + + # mosaic amount + amount = Amount + +# enumeration of link actions +enum LinkAction : uint8 + # unlink account + unlink = 0x00 + + # link account + link = 0x01 diff --git a/sdk-core/src/main/java/io/nem/symbol/core/crypto/MerkleHashBuilder.java b/sdk-core/src/main/java/io/nem/symbol/core/crypto/MerkleHashBuilder.java index a8e8bf5c..070c227b 100644 --- a/sdk-core/src/main/java/io/nem/symbol/core/crypto/MerkleHashBuilder.java +++ b/sdk-core/src/main/java/io/nem/symbol/core/crypto/MerkleHashBuilder.java @@ -16,19 +16,18 @@ package io.nem.symbol.core.crypto; import java.util.ArrayList; -import java.util.List; /** Merkle hash builder. */ public class MerkleHashBuilder { - private final List hashes; + private final ArrayList hashes; /** Constructor. 
*/ public MerkleHashBuilder() { this.hashes = new ArrayList<>(); } - private byte[] getRootHash(List hashes) { + private byte[] getRootHash(ArrayList hashes) { if (hashes.isEmpty()) { return new byte[32]; } @@ -38,15 +37,14 @@ private byte[] getRootHash(List hashes) { Hasher hasher = Hashes::sha3_256; while (numRemainingHashes > 1) { - for (int i = 0; i < numRemainingHashes; i += 2) { if (i + 1 < numRemainingHashes) { - hashes.add(i / 2, hasher.hash(hashes.get(i), hashes.get(i + 1))); + hashes.set(i / 2, hasher.hash(hashes.get(i), hashes.get(i + 1))); continue; } // if there is an odd number of hashes, duplicate the last one - hashes.add(i / 2, hasher.hash(hashes.get(i), hashes.get(i))); + hashes.set(i / 2, hasher.hash(hashes.get(i), hashes.get(i))); ++numRemainingHashes; } diff --git a/sdk-core/src/main/java/io/nem/symbol/sdk/infrastructure/BinarySerializationImpl.java b/sdk-core/src/main/java/io/nem/symbol/sdk/infrastructure/BinarySerializationImpl.java index e3f52f2b..a7b917a1 100644 --- a/sdk-core/src/main/java/io/nem/symbol/sdk/infrastructure/BinarySerializationImpl.java +++ b/sdk-core/src/main/java/io/nem/symbol/sdk/infrastructure/BinarySerializationImpl.java @@ -175,43 +175,126 @@ public class BinarySerializationImpl implements BinarySerialization { /** Constructor */ public BinarySerializationImpl() { - register(new TransferTransactionSerializer()); - register(new MosaicSupplyChangeTransactionSerializer()); - register(new MosaicDefinitionTransactionSerializer()); - register(new AccountKeyLinkTransactionSerializer()); - register(new AccountMetadataTransactionSerializer()); - register(new MosaicMetadataTransactionSerializer()); - register(new NamespaceMetadataTransactionSerializer()); - register(new NamespaceRegistrationTransactionSerializer()); - register(new SecretLockTransactionSerializer()); - register(new SecretProofTransactionSerializer()); - register(new AddressAliasTransactionSerializer()); - register(new MosaicAliasTransactionSerializer()); - 
register(new HashLockTransactionSerializer()); - register(new MultisigAccountModificationTransactionSerializer()); - register(new MosaicAddressRestrictionTransactionSerializer()); - register(new MosaicGlobalRestrictionTransactionSerializer()); - register(new AccountMosaicRestrictionTransactionSerializer()); - register(new AccountOperationRestrictionTransactionSerializer()); - register(new AccountAddressRestrictionTransactionSerializer()); - register(new NodeKeyLinkTransactionBuilderSerializer()); - register(new VotingKeyLinkTransactionBuilderSerializer()); - register(new VrfKeyLinkTransactionBuilderSerializer()); - register(new AggregateTransactionSerializer(TransactionType.AGGREGATE_COMPLETE, this)); - register(new AggregateTransactionSerializer(TransactionType.AGGREGATE_BONDED, this)); + { + TransactionSerializer serializer = new TransferTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MosaicSupplyChangeTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MosaicDefinitionTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AccountKeyLinkTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AccountMetadataTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MosaicMetadataTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new NamespaceMetadataTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new NamespaceRegistrationTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new SecretLockTransactionSerializer(); + 
register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new SecretProofTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AddressAliasTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MosaicAliasTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new HashLockTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MultisigAccountModificationTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MosaicAddressRestrictionTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new MosaicGlobalRestrictionTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AccountMosaicRestrictionTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AccountOperationRestrictionTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AccountAddressRestrictionTransactionSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new NodeKeyLinkTransactionBuilderSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new VotingKeyLinkTransactionBuilderSerializer(); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new VrfKeyLinkTransactionBuilderSerializer(); + register(serializer, serializer.getVersion()); + } + + // beginregion use same objects for OLD version (format has not changed) + { + 
TransactionSerializer serializer = new AggregateTransactionSerializer(TransactionType.AGGREGATE_COMPLETE, this); + register(serializer, 1); + } + { + TransactionSerializer serializer = new AggregateTransactionSerializer(TransactionType.AGGREGATE_BONDED, this); + register(serializer, 1); + } + // endregion + + { + TransactionSerializer serializer = new AggregateTransactionSerializer(TransactionType.AGGREGATE_COMPLETE, this); + register(serializer, serializer.getVersion()); + } + { + TransactionSerializer serializer = new AggregateTransactionSerializer(TransactionType.AGGREGATE_BONDED, this); + register(serializer, serializer.getVersion()); + } } /** @param serializer the serializer to be registered. */ - private void register(TransactionSerializer serializer) { + private void register(TransactionSerializer serializer, int version) { - Pair pair = - Pair.of(serializer.getTransactionType(), serializer.getVersion()); + Pair pair = Pair.of(serializer.getTransactionType(), version); if (serializers.put(pair, serializer) != null) { throw new IllegalArgumentException( "TransactionSerializer for type " + serializer.getTransactionType() + " and version " - + serializer.getVersion() + + version + " was already registered!"); } } diff --git a/sdk-core/src/main/java/io/nem/symbol/sdk/model/transaction/TransactionType.java b/sdk-core/src/main/java/io/nem/symbol/sdk/model/transaction/TransactionType.java index b16b817f..d6bf64b2 100644 --- a/sdk-core/src/main/java/io/nem/symbol/sdk/model/transaction/TransactionType.java +++ b/sdk-core/src/main/java/io/nem/symbol/sdk/model/transaction/TransactionType.java @@ -45,10 +45,10 @@ public enum TransactionType { MULTISIG_ACCOUNT_MODIFICATION(16725, 1), /** Aggregate complete transaction type. */ - AGGREGATE_COMPLETE(16705, 1), + AGGREGATE_COMPLETE(16705, 2), /** Aggregate bonded transaction type */ - AGGREGATE_BONDED(16961, 1), + AGGREGATE_BONDED(16961, 2), /** Voting key link transaction builder. 
*/ VOTING_KEY_LINK(16707, 1), diff --git a/sdk-core/src/test/java/io/nem/symbol/core/crypto/MerkleHashBuilderTest.java b/sdk-core/src/test/java/io/nem/symbol/core/crypto/MerkleHashBuilderTest.java index 9dcaf096..e3186cae 100644 --- a/sdk-core/src/test/java/io/nem/symbol/core/crypto/MerkleHashBuilderTest.java +++ b/sdk-core/src/test/java/io/nem/symbol/core/crypto/MerkleHashBuilderTest.java @@ -16,64 +16,108 @@ package io.nem.symbol.core.crypto; import io.nem.symbol.core.utils.ConvertUtils; +import io.nem.symbol.sdk.infrastructure.RandomUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.IntStream; +import java.util.stream.Stream; + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; /** Testing of {@link MerkleHashBuilder} */ public class MerkleHashBuilderTest { + byte[] calculateMerkleHash(Stream hashes) { + MerkleHashBuilder builder = new MerkleHashBuilder(); + hashes.forEach(embeddedHash -> builder.update(embeddedHash)); + return builder.getRootHash(); + } + + void assertMerkleHash(String expectedHash, String[] hashes) { + // Act: + byte[] calculatedHash = calculateMerkleHash(Stream.of(hashes).map(ConvertUtils::fromHexToBytes)); + + // Assert: + Assertions.assertEquals(expectedHash, ConvertUtils.toHex(calculatedHash)); + } + + @Test + public void testZero() { + this.assertMerkleHash("0000000000000000000000000000000000000000000000000000000000000000", new String[] {}); + } + + @Test + public void testOne() { + String randomHash = ConvertUtils.toHex(RandomUtils.generateRandomBytes(32)); + this.assertMerkleHash(randomHash, new String[] { randomHash }); + } + + @Test + public void testCanBuildBalancedTree() { + this.assertMerkleHash( + "7D853079F5F9EE30BDAE49C4956AF20CDF989647AFE971C069AC263DA1FFDF7E", + new String[] { + "36C8213162CDBC78767CF43D4E06DDBE0D3367B6CEAEAEB577A50E2052441BC8", + "8A316E48F35CDADD3F827663F7535E840289A16A43E7134B053A86773E474C28", + 
"6D80E71F00DFB73B358B772AD453AEB652AE347D3E098AE269005A88DA0B84A7", + "2AE2CA59B5BB29721BFB79FE113929B6E52891CAA29CBF562EBEDC46903FF681", + "421D6B68A6DF8BB1D5C9ACF7ED44515E77945D42A491BECE68DA009B551EE6CE", + "7A1711AF5C402CFEFF87F6DA4B9C738100A7AC3EDAD38D698DF36CA3FE883480", + "1E6516B2CC617E919FAE0CF8472BEB2BFF598F19C7A7A7DC260BC6715382822C", + "410330530D04A277A7C96C1E4F34184FDEB0FFDA63563EFD796C404D7A6E5A20" }); + } + + @Test + public void testCanBuildFromUnbalancedTree() { + this.assertMerkleHash( + "DEFB4BF7ACF2145500087A02C88F8D1FCF27B8DEF4E0FDABE09413D87A3F0D09", + new String[] { + "36C8213162CDBC78767CF43D4E06DDBE0D3367B6CEAEAEB577A50E2052441BC8", + "8A316E48F35CDADD3F827663F7535E840289A16A43E7134B053A86773E474C28", + "6D80E71F00DFB73B358B772AD453AEB652AE347D3E098AE269005A88DA0B84A7", + "2AE2CA59B5BB29721BFB79FE113929B6E52891CAA29CBF562EBEDC46903FF681", + "421D6B68A6DF8BB1D5C9ACF7ED44515E77945D42A491BECE68DA009B551EE6CE" }); + } + + @Test + public void testChangingSubHashOrderChangesMerkleHash() { + // Arrange: + ArrayList seed1 = new ArrayList(); + for (int i = 0; i < 8; ++i) + seed1.add(RandomUtils.generateRandomBytes(32)); + + List seed2 = Arrays.asList( + seed1.get(0), seed1.get(1), seed1.get(2), seed1.get(5), + seed1.get(4), seed1.get(3), seed1.get(6), seed1.get(7) + ); + + // Act: + byte[] rootHash1 = this.calculateMerkleHash(seed1.stream()); + byte[] rootHash2 = this.calculateMerkleHash(seed2.stream()); + + // Assert: + Assertions.assertNotEquals(ConvertUtils.toHex(rootHash1), ConvertUtils.toHex(rootHash2)); + } + + @Test + public void testChangingSubHashChangesMerkleHash() { + // Arrange: + ArrayList seed1 = new ArrayList(); + for (int i = 0; i < 8; ++i) + seed1.add(RandomUtils.generateRandomBytes(32)); + + List seed2 = Arrays.asList( + seed1.get(0), seed1.get(1), seed1.get(2), seed1.get(3), + RandomUtils.generateRandomBytes(32), seed1.get(5), seed1.get(6), seed1.get(7) + ); + + // Act: + byte[] rootHash1 = 
this.calculateMerkleHash(seed1.stream()); + byte[] rootHash2 = this.calculateMerkleHash(seed2.stream()); - @Test - public void testZero() { - MerkleHashBuilder builder = new MerkleHashBuilder(); - Assertions.assertEquals( - "0000000000000000000000000000000000000000000000000000000000000000", - ConvertUtils.toHex(builder.getRootHash())); - } - - @Test - public void testOne() { - MerkleHashBuilder builder = new MerkleHashBuilder(); - builder.update( - ConvertUtils.fromHexToBytes( - "215b158f0bd416b596271bce527cd9dc8e4a639cc271d896f9156af6f441eeb9")); - Assertions.assertEquals( - "215B158F0BD416B596271BCE527CD9DC8E4A639CC271D896F9156AF6F441EEB9", - ConvertUtils.toHex(builder.getRootHash())); - } - - @Test - public void testTwo() { - MerkleHashBuilder builder = new MerkleHashBuilder(); - - builder.update( - ConvertUtils.fromHexToBytes( - "215b158f0bd416b596271bce527cd9dc8e4a639cc271d896f9156af6f441eeb9")); - builder.update( - ConvertUtils.fromHexToBytes( - "976c5ce6bf3f797113e5a3a094c7801c885daf783c50563ffd3ca6a5ef580e25")); - - Assertions.assertEquals( - "1C704E3AC99B124F92D2648649EC72C7A19EA4E2BB24F669B976180A295876FA", - ConvertUtils.toHex(builder.getRootHash())); - } - - @Test - public void testThree() { - MerkleHashBuilder builder = new MerkleHashBuilder(); - - builder.update( - ConvertUtils.fromHexToBytes( - "215b158f0bd416b596271bce527cd9dc8e4a639cc271d896f9156af6f441eeb9")); - builder.update( - ConvertUtils.fromHexToBytes( - "976c5ce6bf3f797113e5a3a094c7801c885daf783c50563ffd3ca6a5ef580e25")); - - builder.update( - ConvertUtils.fromHexToBytes( - "e926cc323886d47234bb0b49219c81e280e8a65748b437c2ae83b09b37a5aaf2")); - - Assertions.assertEquals( - "5DC17B2409D50BCC7C1FAA720D0EC8B79A1705D0C517BCC0BDBD316540974D5E", - ConvertUtils.toHex(builder.getRootHash())); - } + // Assert: + Assertions.assertNotEquals(ConvertUtils.toHex(rootHash1), ConvertUtils.toHex(rootHash2)); + } } diff --git 
a/sdk-core/src/test/java/io/nem/symbol/sdk/infrastructure/BinarySerializationTest.java b/sdk-core/src/test/java/io/nem/symbol/sdk/infrastructure/BinarySerializationTest.java index 4374b45c..5dc3bd0e 100644 --- a/sdk-core/src/test/java/io/nem/symbol/sdk/infrastructure/BinarySerializationTest.java +++ b/sdk-core/src/test/java/io/nem/symbol/sdk/infrastructure/BinarySerializationTest.java @@ -50,8 +50,7 @@ class BinarySerializationTest { @Test void testAllTransactionAreHandled() { BinarySerializationImpl binarySerialization = new BinarySerializationImpl(); - List notHandledTransactionTypes = - Arrays.stream(TransactionType.values()) + List notHandledTransactionTypes = Arrays.stream(TransactionType.values()) .filter( t -> { try { @@ -64,6 +63,12 @@ void testAllTransactionAreHandled() { }) .collect(Collectors.toList()); + if (null == binarySerialization.resolveSerializer(TransactionType.AGGREGATE_BONDED, 1)) + notHandledTransactionTypes.add(TransactionType.AGGREGATE_BONDED); + + if (null == binarySerialization.resolveSerializer(TransactionType.AGGREGATE_COMPLETE, 1)) + notHandledTransactionTypes.add(TransactionType.AGGREGATE_BONDED); + Assertions.assertTrue( notHandledTransactionTypes.isEmpty(), "The following transaction types are not handled: \n" diff --git a/sdk-core/src/test/java/io/nem/symbol/sdk/model/transaction/AggregateTransactionTest.java b/sdk-core/src/test/java/io/nem/symbol/sdk/model/transaction/AggregateTransactionTest.java index 1f1328b0..68805e9b 100644 --- a/sdk-core/src/test/java/io/nem/symbol/sdk/model/transaction/AggregateTransactionTest.java +++ b/sdk-core/src/test/java/io/nem/symbol/sdk/model/transaction/AggregateTransactionTest.java @@ -64,7 +64,7 @@ void serializeEmpty() { @Test void basicCatbufferDeserialization() { String expected = - 
"A80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000019041420000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; + "A80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000029041420000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; AggregateBondedTransactionBuilder transactionBuilder = (AggregateBondedTransactionBuilder) @@ -114,7 +114,7 @@ void serializeTwoTransaction() { BinarySerializationImpl binarySerialization = new BinarySerializationImpl(); String expected = - "60010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000"; + 
"60010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000"; Assertions.assertEquals(expected, ConvertUtils.toHex(aggregateTransaction.serialize())); @@ -136,7 +136,7 @@ void serializeTwoTransaction() { @Test void basicCatbufferAggregateSerialization() { String expected = - "6001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000190414200000000000000000100000000000000B4C97320255A2F755F6BE2F4DDAC0BB3EBDD25508DBE460EA6988366F404706AB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190E8FEBD671DD41BEE94EC3BA5831CB608A312C2F203BA840D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000"; + 
"6001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000290414200000000000000000100000000000000B4C97320255A2F755F6BE2F4DDAC0BB3EBDD25508DBE460EA6988366F404706AB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190E8FEBD671DD41BEE94EC3BA5831CB608A312C2F203BA840D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A000000000000000100000000000000"; BinarySerialization serialization = new BinarySerializationImpl(); @@ -287,7 +287,7 @@ void serializeThreeCosignature() { void basicCatbufferAggregateSerialization3Consignatures() { String expected = - "E0010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904141000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545622200000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456333CCC9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; + 
"E0010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002904141000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545622200000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456333CCC9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; AggregateCompleteTransactionBuilder transactionBuilder = (AggregateCompleteTransactionBuilder) @@ -350,7 +350,7 @@ void serializeTwoTransactionTwoCosignature() { .build(); String expected = - 
"30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; + 
"30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; Assertions.assertEquals(expected, ConvertUtils.toHex(aggregateTransaction.serialize())); @@ -436,7 +436,7 @@ void serializeTwoTransactionTwoCosignatureUsingAdd() { .build(); String expected = - 
"30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; + 
"30020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002904142000000000000000001000000000000006C610D61B3E6839AE85AC18465CF6AD06D8F17A4F145F720BD324880B4FBB12BB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190F36CA680C35D630662A0C38DC89D4978D10B511B3D241A0D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; Assertions.assertEquals(expected, ConvertUtils.toHex(aggregateTransaction.serialize())); @@ -475,7 +475,7 @@ void serializeTwoTransactionTwoCosignatureUsingAdd() { void basicCatbufferAggregateSerializationWithCosignatures() { String expected2 = - 
"3002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000190414200000000000000000100000000000000B4C97320255A2F755F6BE2F4DDAC0BB3EBDD25508DBE460EA6988366F404706AB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190E8FEBD671DD41BEE94EC3BA5831CB608A312C2F203BA840D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; + 
"3002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000290414200000000000000000100000000000000B4C97320255A2F755F6BE2F4DDAC0BB3EBDD25508DBE460EA6988366F404706AB8000000000000006D00000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E6000000000190544190E8FEBD671DD41BEE94EC3BA5831CB608A312C2F203BA840D00010000000000672B0000CE560000640000000000000000536F6D65204D6573736167650000004100000000000000F6503F78FBF99544B906872DDB392F4BE707180D285E7919DBACEF2E9573B1E60000000001904D428869746E9B1A70570A00000000000000010000000000000000000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456111AAA9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA6545611100000000000000009A49366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222BBB9366406ACA952B88BADF5F1E9BE6CE4968141035A60BE503273EA65456222"; AggregateBondedTransactionBuilder transactionBuilder = (AggregateBondedTransactionBuilder)