Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 3 additions & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,11 @@ build-backend = "setuptools.build_meta"

[project]
name = "snet-cli"
version = "3.1.0"
version = "3.1.1"
description = "SingularityNET CLI"
readme = "README.md"
requires-python = ">=3.10"
license = {text = "MIT"}
license = "MIT"
authors = [
{name = "SingularityNET Foundation", email = "info@singularitynet.io"}
]
Expand All @@ -30,8 +30,7 @@ dependencies = [
"mnemonic==0.21",
"pyyaml~=6.0.1",
"ipfshttpclient==0.4.13.2",
"pymultihash==0.8.2",
"base58==2.1.1",
"py-multihash~=3.0",
"argcomplete~=3.1",
"grpcio-health-checking~=1.59",
"jsonschema~=4.1",
Expand Down
7 changes: 1 addition & 6 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,12 @@ protobuf~=6.30
grpcio~=1.59
grpcio-tools~=1.59
wheel~=0.45
# jsonrpcclient~=4.0
# eth-hash~=0.5
rlp~=4.0
# eth-rlp~=2.0
web3~=7.0
mnemonic==0.21
# pycoin==0.92.20241201
pyyaml~=6.0.1
ipfshttpclient==0.4.13.2
pymultihash==0.8.2
base58==2.1.1
py-multihash~=3.0
argcomplete~=3.1
grpcio-health-checking~=1.59
jsonschema~=4.1
Expand Down
14 changes: 8 additions & 6 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import os
from pathlib import Path
from setuptools import setup
from setuptools.command.develop import develop as _develop
from setuptools.command.install import install as _install
from setuptools.command.build_py import build_py as _build_py
from grpc_tools import protoc
from pkg_resources import resource_filename
import importlib.resources


def install_and_compile_proto():
"""
Expand All @@ -14,8 +14,7 @@ def install_and_compile_proto():
proto_dir = Path(__file__).absolute().parent.joinpath(
"snet", "cli", "resources", "proto")

# Locate the standard grpc_tools internal protos (google/protobuf/...)
grpc_protos_include = resource_filename('grpc_tools', '_proto')
grpc_protos_include = str(importlib.resources.files('grpc_tools').joinpath('_proto'))

print(f"Proto directory: {proto_dir}")
print(f"Grpc include directory: {grpc_protos_include}")
Expand All @@ -31,7 +30,7 @@ def install_and_compile_proto():
command = [
'grpc_tools.protoc',
f'-I{proto_dir}',
f'-I{grpc_protos_include}', # <--- CRITICAL FIX: Add standard protos to include path
f'-I{grpc_protos_include}',
f'--python_out={proto_dir}',
f'--grpc_python_out={proto_dir}',
str(fn)
Expand All @@ -41,27 +40,30 @@ def install_and_compile_proto():
print(f"Error: Failed to compile {fn}")
raise RuntimeError(f"Protocol buffer compilation failed for {fn}")


class build_py(_build_py):
    """Build-time hook: generate gRPC stubs, then run the normal build_py.

    This is the entry point used by PEP 517 front-ends ('python -m build'),
    so the compiled *_pb2 modules end up inside the wheel.
    """

    def run(self):
        # Compile the bundled .proto files before the sources are collected.
        self.execute(install_and_compile_proto, (), msg="Compile protocol buffers")
        super().run()


class develop(_develop):
    """Editable-install hook ('pip install -e .'): compile protos first."""

    def run(self):
        # Generate gRPC stubs before setuptools wires up the egg-link.
        self.execute(install_and_compile_proto, (), msg="Compile protocol buffers")
        super().run()


class install(_install):
    """Legacy 'setup.py install' hook: compile protos before installing."""

    def run(self):
        # Generate gRPC stubs so the installed package ships them.
        self.execute(install_and_compile_proto, (), msg="Compile protocol buffers")
        super().run()


setup(
cmdclass={
'develop': develop,
Expand Down
33 changes: 20 additions & 13 deletions snet/cli/utils/ipfs_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
import io
import os

import base58
import multihash
import hashlib


def publish_file_in_ipfs(ipfs_client, filepath, wrap_with_directory=True):
Expand Down Expand Up @@ -96,26 +96,33 @@ def publish_proto_in_filecoin(filecoin_client, protodir):

def get_from_ipfs_and_checkhash(ipfs_client, ipfs_hash_base58, validate=True):
    """
    Fetch a file from IPFS and optionally verify its integrity.

    Args:
        ipfs_client: connected IPFS HTTP client; must expose ``cat`` and
            ``block.get``.
        ipfs_hash_base58: base58-encoded multihash string of the content
            (CIDv0-style identifier).
        validate: when True, re-hash the raw block bytes and compare the
            digest against the one embedded in the multihash.

    Returns:
        The file contents as returned by ``ipfs_client.cat``.

    Raises:
        ValueError: if the multihash cannot be decoded, the hash algorithm
            is unsupported, or the computed digest does not match.
    """
    data = ipfs_client.cat(ipfs_hash_base58)

    if validate:
        # Hash is verified against the raw block bytes, not the unixfs
        # payload: the multihash digest covers the full DAG node.
        block_data = ipfs_client.block.get(ipfs_hash_base58)

        try:
            # Decode the base58 hash string into a multihash to recover the
            # hash-function name and the expected digest.
            mh_bytes = multihash.from_b58_string(ipfs_hash_base58)
            decoded = multihash.decode(mh_bytes)

            hash_func_name = decoded.name
            expected_digest = decoded.digest

            if hash_func_name == 'sha2-256':  # standard for IPFS (CIDv0)
                actual_digest = hashlib.sha256(block_data).digest()
            else:
                # Fall back to hashlib's named constructors for other
                # algorithms.
                # NOTE(review): multihash names use dashes ('sha3-256');
                # stripping them does not always yield a valid hashlib name
                # (e.g. 'sha3256') — confirm which non-sha2-256 hashes are
                # actually encountered in practice.
                h = hashlib.new(hash_func_name.replace('-', ''))
                h.update(block_data)
                actual_digest = h.digest()

            if actual_digest != expected_digest:
                raise Exception("IPFS hash mismatch with data")

        except Exception as e:
            raise ValueError(f"Integrity check failed: {str(e)}") from e

    return data

Expand Down