Removing org_name from commands and some code cleaning. #351

Closed
wants to merge 2 commits
129 changes: 67 additions & 62 deletions packages/snet_cli/snet/snet_cli/mpe_channel_command.py

Large diffs are not rendered by default.

16 changes: 9 additions & 7 deletions packages/snet_cli/snet/snet_cli/mpe_orgainzation_metadata.py
@@ -11,10 +11,10 @@ def default(self, o):

class PaymentStorageClient(object):

def __init__(self, connection_timeout=None, request_timeout="", endpoints=[]):
def __init__(self, connection_timeout=None, request_timeout="", endpoints=None):
self.connection_timeout = connection_timeout
self.request_timeout = request_timeout
self.endpoints = endpoints
self.endpoints = endpoints if endpoints else []

def add_payment_storage_client_details(self, connection_time_out, request_timeout, endpoints):
self.connection_timeout = connection_time_out
@@ -171,10 +171,10 @@ class OrganizationMetadata(object):

"""

def __init__(self, org_name="", org_id="", groups=[]):
def __init__(self, org_name="", org_id="", groups=None):
self.org_name = org_name
self.org_id = org_id
self.groups = groups
self.groups = groups if groups else []

def add_group(self, group):
self.groups.append(group)
@@ -198,22 +198,24 @@ def from_file(cls, filepath):
with open(filepath) as f:
return OrganizationMetadata.from_json(json.load(f))

def is_removing_existing_group_from_org(self, current_group_name, existing_registry_metadata_group_names):
@staticmethod
def is_removing_existing_group_from_org(current_group_name, existing_registry_metadata_group_names):
if len(existing_registry_metadata_group_names-current_group_name) == 0:
pass
else:
remvoved_groups = existing_registry_metadata_group_names - current_group_name
raise Exception(
"Cannot remove existing group from organization as it might be attached to services, groups you are removing are %s" % remvoved_groups)
"Cannot remove existing group from organization as it might be attached to services, "
"groups you are removing are %s" % remvoved_groups)

def validate(self, existing_registry_metadata=None):

if self.org_id is None:
raise Exception("Org_id cannot be null")
if self.org_name is None:
raise Exception("Org_name cannot be null")
unique_group_names = set()
if self.groups:
unique_group_names = set()
for group in self.groups:
unique_group_names.add(group.group_name)

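A quick illustration of why the endpoints=[] and groups=[] defaults above were replaced with None: Python evaluates a default argument once, at definition time, so a mutable default is shared by every call that omits it. A minimal sketch (class names hypothetical, not part of the CLI):

class Broken:
    def __init__(self, endpoints=[]):  # one list object, created once and shared
        self.endpoints = endpoints

class Fixed:
    def __init__(self, endpoints=None):  # fresh list for every instance
        self.endpoints = endpoints if endpoints else []

a, b = Broken(), Broken()
a.endpoints.append("http://localhost:7000")
print(b.endpoints)  # ['http://localhost:7000'] -- state leaked into the other instance

c, d = Fixed(), Fixed()
c.endpoints.append("http://localhost:7000")
print(d.endpoints)  # [] -- each instance keeps its own list
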
31 changes: 15 additions & 16 deletions packages/snet_cli/snet/snet_cli/mpe_service_metadata.py
@@ -39,7 +39,6 @@
import re
import json
import base64
import secrets

from collections import defaultdict
from enum import Enum
@@ -56,7 +55,7 @@ class AssetType(Enum):

@staticmethod
def is_single_value(asset_type):
if asset_type == AssetType.HERO_IMAGE.value or asset_type == AssetType.DOCUMENTATION.value or asset_type == AssetType.TERMS_OF_USE.value:
if asset_type in [AssetType.HERO_IMAGE.value, AssetType.DOCUMENTATION.value, AssetType.TERMS_OF_USE.value]:
return True


@@ -83,10 +82,10 @@ def set_simple_field(self, f, v):
self.m[f] = v

def set_fixed_price_in_cogs(self, group_name, price):
if (type(price) != int):
if not isinstance(price, int):
raise Exception("Price should have int type")

if (not self.is_group_name_exists(group_name)):
if not self.is_group_name_exists(group_name):
raise Exception("the group %s is not present" % str(group_name))

for group in self.m["groups"]:
@@ -105,10 +104,10 @@ def set_fixed_price_in_cogs(self, group_name, price):
"price_in_cogs": price, "default": True}]

def set_method_price_in_cogs(self, group_name, package_name, service_name, method, price):
if (type(price) != int):
if type(price) != int:
raise Exception("Price should have int type")

if (not self.is_group_name_exists(group_name)):
if not self.is_group_name_exists(group_name):
raise Exception("the group %s is not present" % str(group_name))

groups = self.m["groups"]
@@ -156,7 +155,7 @@ def set_method_price_in_cogs(self, group_name, package_name, service_name, method, price):

def add_group(self, group_name):
""" Return new group_id in base64 """
if (self.is_group_name_exists(group_name)):
if self.is_group_name_exists(group_name):
raise Exception("the group \"%s\" is already present" %
str(group_name))

@@ -204,15 +203,15 @@ def add_endpoint_to_group(self, group_name, endpoint):
endpoint = 'http://' + endpoint
if not is_valid_endpoint(endpoint):
raise Exception("Endpoint is not a valid URL")
if (not self.is_group_name_exists(group_name)):
if not self.is_group_name_exists(group_name):
raise Exception("the group %s is not present" % str(group_name))
if (endpoint in self.get_all_endpoints_for_group(group_name)):
if endpoint in self.get_all_endpoints_for_group(group_name):
raise Exception("the endpoint %s is already present" %
str(endpoint))

groups = self.m["groups"]
for group in groups:
if (group["group_name"] == group_name):
if group["group_name"] == group_name:
if 'endpoints' in group:
group['endpoints'].append(endpoint)
else:
@@ -231,7 +230,7 @@ def is_group_name_exists(self, group_name):
""" check if group with given name is already exists """
groups = self.m["groups"]
for g in groups:
if (g["group_name"] == group_name):
if g["group_name"] == group_name:
return True
return False

@@ -240,7 +239,7 @@ def get_group_by_group_id(self, group_id):
group_id_base64 = base64.b64encode(group_id).decode('ascii')
groups = self.m["groups"]
for g in groups:
if (g["group_id"] == group_id_base64):
if g["group_id"] == group_id_base64:
return g
return None

@@ -272,10 +271,10 @@ def __contains__(self, key):
def get_group_name_nonetrick(self, group_name=None):
""" In all getter function in case of single payment group, group_name can be None """
groups = self.m["groups"]
if (len(groups) == 0):
if not len(groups):
raise Exception("Cannot find any groups in metadata")
if (not group_name):
if (len(groups) > 1):
if not group_name:
if len(groups) > 1:
raise Exception(
"We have more than one payment group in metadata, so group_name should be specified")
return groups[0]["group_name"]
@@ -284,7 +283,7 @@ def get_group(self, group_name=None):
def get_group(self, group_name=None):
group_name = self.get_group_name_nonetrick(group_name)
for g in self.m["groups"]:
if (g["group_name"] == group_name):
if g["group_name"] == group_name:
return g
raise Exception('Cannot find group "%s" in metadata' % group_name)

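Side note on the two price checks touched above: the first hunk switches to isinstance(price, int) while the second keeps type(price) != int, and the two are not interchangeable. A minimal sketch of the difference (illustrative only):

price = True  # bool is a subclass of int

print(isinstance(price, int))  # True  -- isinstance also accepts int subclasses such as bool
print(type(price) == int)      # False -- exact-type comparison rejects them
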
19 changes: 13 additions & 6 deletions packages/snet_cli/snet/snet_cli/utils.py
@@ -68,6 +68,7 @@ def serializable(o):
else:
return o.__dict__


def safe_address_converter(a):
if not web3.eth.is_checksum_address(a):
raise Exception("%s is not is not a valid Ethereum checksum address"%a)
@@ -160,7 +161,7 @@ def compile_proto(entry_path, codegen_dir, proto_file=None, target_language="pyt
if target_language == "python":
compiler_args.insert(0, "protoc")
compiler_args.append("--python_out={}".format(codegen_dir))
compiler_args.append("--grpc_python_out={}".format(codegen_dir))
compiler_args.append("--grpc_python_out={}".format(codegen_dir))
compiler = protoc
elif target_language == "nodejs":
protoc_node_compiler_path = Path(RESOURCES_PATH.joinpath("node_modules").joinpath("grpc-tools").joinpath("bin").joinpath("protoc.js")).absolute()
@@ -187,16 +188,18 @@ def compile_proto(entry_path, codegen_dir, proto_file=None, target_language="pyt
print(e)
return False


def abi_get_element_by_name(abi, name):
""" Return element of abi (return None if fails to find) """
if (abi and "abi" in abi):
if abi and "abi" in abi:
for a in abi["abi"]:
if ("name" in a and a["name"] == name):
if "name" in a and a["name"] == name:
return a
return None


def abi_decode_struct_to_dict(abi, struct_list):
return {el_abi["name"] : el for el_abi, el in zip(abi["outputs"], struct_list)}
return {el_abi["name"]: el for el_abi, el in zip(abi["outputs"], struct_list)}


def int4bytes_big(b):
@@ -236,17 +239,19 @@ def remove_http_https_prefix(endpoint):
endpoint = endpoint.replace("http://","")
return endpoint


def open_grpc_channel(endpoint):
"""
open grpc channel:
- for http:// we open insecure_channel
- for https:// we open secure_channel (with default credentials)
- without prefix we open insecure_channel
"""
if (endpoint.startswith("https://")):
if endpoint.startswith("https://"):
return grpc.secure_channel(remove_http_https_prefix(endpoint), grpc.ssl_channel_credentials())
return grpc.insecure_channel(remove_http_https_prefix(endpoint))


def rgetattr(obj, attr):
"""
>>> from types import SimpleNamespace
@@ -287,11 +292,13 @@ def get_address_from_private(private_key):
return web3.eth.Account.privateKeyToAccount(private_key).address


class add_to_path():
class add_to_path:
def __init__(self, path):
self.path = path

def __enter__(self):
sys.path.insert(0, self.path)

def __exit__(self, exc_type, exc_value, traceback):
try:
sys.path.remove(self.path)
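For context on the add_to_path cleanup above: it is a small context manager that puts a directory at the front of sys.path inside the with-block and removes it on exit. A self-contained usage sketch; the example path and the except clause are assumptions, since the tail of __exit__ is not shown in the hunk:

import sys

class add_to_path:
    def __init__(self, path):
        self.path = path

    def __enter__(self):
        sys.path.insert(0, self.path)

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            sys.path.remove(self.path)
        except ValueError:
            pass  # assumption: ignore the case where the path was already removed

with add_to_path("/tmp/generated_protos"):
    pass  # modules located under /tmp/generated_protos are importable here
# sys.path is restored to its previous state here
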
12 changes: 6 additions & 6 deletions packages/snet_cli/snet/snet_cli/utils_ipfs.py
@@ -29,12 +29,12 @@ def publish_proto_in_ipfs(ipfs_client, protodir):
return base58 encoded ipfs hash
"""

if (not os.path.isdir(protodir)):
if not os.path.isdir(protodir):
raise Exception("Directory %s doesn't exists" % protodir)

files = glob.glob(os.path.join(protodir, "*.proto"))

if (len(files) == 0):
if len(files) == 0:
raise Exception("Cannot find any %s files" %
(os.path.join(protodir, "*.proto")))

@@ -94,7 +94,7 @@ def hash_to_bytesuri(s):

def bytesuri_to_hash(s):
s = s.rstrip(b"\0").decode('ascii')
if (not s.startswith("ipfs://")):
if not s.startswith("ipfs://"):
raise Exception("We support only ipfs uri in Registry")
return s[7:]

@@ -108,14 +108,14 @@ def safe_extract_proto_from_ipfs(ipfs_client, ipfs_hash, protodir):
spec_tar = get_from_ipfs_and_checkhash(ipfs_client, ipfs_hash)
with tarfile.open(fileobj=io.BytesIO(spec_tar)) as f:
for m in f.getmembers():
if (os.path.dirname(m.name) != ""):
if os.path.dirname(m.name) != "":
raise Exception(
"tarball has directories. We do not support it.")
if (not m.isfile()):
if not m.isfile():
raise Exception(
"tarball contains %s which is not a files" % m.name)
fullname = os.path.join(protodir, m.name)
if (os.path.exists(fullname)):
if os.path.exists(fullname):
raise Exception("%s already exists." % fullname)
# now it is safe to call extractall
f.extractall(protodir)
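
The safe_extract_proto_from_ipfs hunk above only drops redundant parentheses, but the guard it performs is worth spelling out: every tar member must be a flat, regular file (and not already present on disk) before extractall() is called. A runnable sketch of the first two checks against an in-memory tarball (file name and contents are made up):

import io
import os
import tarfile

def check_members(tar):
    for m in tar.getmembers():
        if os.path.dirname(m.name) != "":
            raise Exception("tarball has directories. We do not support it.")
        if not m.isfile():
            raise Exception("tarball contains %s which is not a file" % m.name)

# build a tiny in-memory tarball holding one flat .proto file
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    data = b'syntax = "proto3";'
    info = tarfile.TarInfo(name="example.proto")
    info.size = len(data)
    tar.addfile(info, io.BytesIO(data))

buf.seek(0)
with tarfile.open(fileobj=buf) as tar:
    check_members(tar)  # passes: a single flat regular file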
51 changes: 28 additions & 23 deletions packages/snet_cli/snet/snet_cli/utils_proto.py
@@ -5,7 +5,7 @@
from google.protobuf import json_format


def import_protobuf_from_dir(proto_dir, method_name, service_name = None):
def import_protobuf_from_dir(proto_dir, method_name, service_name=None):
"""
Dynamic import of grpc-protobuf from given directory (proto_dir)
service_name should be provided only in the case of conflicting method names (two methods with the same name in difference services).
@@ -14,23 +14,27 @@ def import_protobuf_from_dir(proto_dir, method_name, service_name = None):
"""
proto_dir = Path(proto_dir)
# <SERVICE>_pb2_grpc.py import <SERVICE>_pb2.py so we are forced to add proto_dir to path
sys.path.append(str(proto_dir))
sys.path.append(str(proto_dir))
grpc_pyfiles = [str(os.path.basename(p)) for p in proto_dir.glob("*_pb2_grpc.py")]

good_rez = []
for grpc_pyfile in grpc_pyfiles:
is_found, rez = _import_protobuf_from_file(grpc_pyfile, method_name, service_name);
if (is_found): good_rez.append(rez)
if (len(good_rez) == 0):
raise Exception("Error while loading protobuf. Cannot find method=%s"%method_name)
if (len(good_rez) > 1):
if (service_name):
raise Exception("Error while loading protobuf. Found method %s.%s in multiply .proto files. We don't support packages yet!"%(service_name, method_name))
is_found, rez = _import_protobuf_from_file(grpc_pyfile, method_name, service_name)
if is_found:
good_rez.append(rez)
if len(good_rez) == 0:
raise Exception("Error while loading protobuf. Cannot find method=%s" % method_name)
if len(good_rez) > 1:
if service_name:
raise Exception("Error while loading protobuf. Found method %s.%s in multiply .proto files. "
"We don't support packages yet!" % (service_name, method_name))
else:
raise Exception("Error while loading protobuf. Found method %s in multiply .proto files. You could try to specify service_name."%method_name)
raise Exception("Error while loading protobuf. Found method %s in multiply .proto files. "
"You could try to specify service_name." % method_name)
return good_rez[0]

def _import_protobuf_from_file(grpc_pyfile, method_name, service_name = None):

def _import_protobuf_from_file(grpc_pyfile, method_name, service_name=None):
"""
helper function which try to import method from the given _pb2_grpc.py file
service_name should be provided only in case of name conflict
Expand All @@ -39,40 +43,41 @@ def _import_protobuf_from_file(grpc_pyfile, method_name, service_name = None):
"""

prefix = grpc_pyfile[:-12]
pb2 = __import__("%s_pb2"%prefix)
pb2_grpc = __import__("%s_pb2_grpc"%prefix)

pb2 = __import__("%s_pb2" % prefix)
pb2_grpc = __import__("%s_pb2_grpc" % prefix)

# we take all objects from pb2_grpc module which endswith "Stub", and we remove this postfix to get service_name
all_service_names = [stub_name[:-4] for stub_name in dir(pb2_grpc) if stub_name.endswith("Stub")]

# if service_name was specified we take only this service_name
if (service_name):
if (service_name not in all_service_names):
if service_name:
if service_name not in all_service_names:
return False, None
all_service_names = [service_name]
all_service_names = [service_name]

found_services = []
for service_name in all_service_names:
service_descriptor = getattr(pb2, "DESCRIPTOR").services_by_name[service_name]
service_descriptor = getattr(pb2, "DESCRIPTOR").services_by_name[service_name]
for method in service_descriptor.methods:
if(method.name == method_name):
if method.name == method_name:
request_class = method.input_type._concrete_class
response_class = method.output_type._concrete_class
stub_class = getattr(pb2_grpc, "%sStub"%service_name)
stub_class = getattr(pb2_grpc, "%sStub" % service_name)

found_services.append(service_name)
if (len(found_services) == 0):
if len(found_services) == 0:
return False, None
if (len(found_services) > 1):
if len(found_services) > 1:
raise Exception("Error while loading protobuf. We found methods %s in multiply services [%s]."
" You should specify service_name."%(method_name, ", ".join(found_services)))
" You should specify service_name." % (method_name, ", ".join(found_services)))
return True, (stub_class, request_class, response_class)


def switch_to_json_payload_encoding(call_fn, response_class):
""" Switch payload encoding to JSON for GRPC call """
def json_serializer(*args, **kwargs):
return bytes(json_format.MessageToJson(args[0], True, preserving_proto_field_name=True), "utf-8")

def json_deserializer(*args, **kwargs):
resp = response_class()
json_format.Parse(args[0], resp, True)
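The lookup cleaned up above leans on a naming convention of grpcio-generated code: each service in a _pb2_grpc module gets a <ServiceName>Stub class, so stripping the "Stub" suffix recovers the service names. A toy stand-in (fake module and service names, purely illustrative):

import types

# pretend module exposing two generated stub classes
fake_pb2_grpc = types.SimpleNamespace(CalculatorStub=object, TranslatorStub=object)

all_service_names = [name[:-4] for name in dir(fake_pb2_grpc) if name.endswith("Stub")]
print(all_service_names)  # ['Calculator', 'Translator']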