diff --git a/.gitignore b/.gitignore index fc5092d..9e91387 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,8 @@ __pycache__/ *.py[cod] *$py.class - +pyproject.toml +poetry.lock # Virtual environment directories venv/ env/ @@ -11,6 +12,7 @@ env/ ENV/ env.bak/ venv.bak/ +NOTES # Distribution / packaging @@ -28,6 +30,9 @@ dist/ *.swo *.bak *.tmp +.mypy_cache/ +scripts/ +.pytest_cache/ # macOS files .DS_Store @@ -38,7 +43,7 @@ scripts/ # Windows files Thumbs.db ehthumbs.db - +NOTES # Log files *.log diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..c24e879 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,31 @@ +[mypy] +python_version = 3.11 +warn_return_any = True +warn_unused_configs = True +disallow_untyped_defs = True +disallow_incomplete_defs = True +check_untyped_defs = True +disallow_untyped_decorators = True +no_implicit_optional = True +warn_redundant_casts = True +warn_unused_ignores = False +warn_no_return = True +warn_unreachable = True + +files = ./ + +exclude = + ^__init__\.py$ + _pb2\.py$ + _pb2_grpc\.py$ + \.proto$ + +[mypy-grpc.*] +ignore_missing_imports = True + +[mypy-reedsolo.*] +ignore_missing_imports = True + + +[mypy-ipld_dag_pb.*] +ignore_missing_imports = True \ No newline at end of file diff --git a/private/ipc/client.py b/private/ipc/client.py index e09cf6b..bc8f73b 100644 --- a/private/ipc/client.py +++ b/private/ipc/client.py @@ -1,10 +1,13 @@ import time from typing import Optional from web3 import Web3 -from web3.middleware import geth_poa_middleware +from web3.middleware import geth_poa_middleware # type: ignore[import-untyped] from eth_account import Account from eth_account.signers.local import LocalAccount from .contracts import StorageContract, AccessManagerContract +from typing import cast, Tuple +from eth_typing import HexAddress, HexStr +from web3.types import TxReceipt class TransactionFailedError(Exception): """Raised when a transaction fails (receipt status is 0).""" @@ -12,23 +15,55 @@ class 
TransactionFailedError(Exception): class Config: """Configuration for the Ethereum storage contract client.""" - def __init__(self, dial_uri: str, private_key: str, storage_contract_address: str, access_contract_address: Optional[str] = None): + def __init__(self, dial_uri: str, private_key: str, storage_contract_address: str, access_contract_address: Optional[str] = None) -> None: + """ + Initializes the client configuration. + Args: + dial_uri: URI for the Ethereum node (e.g., HTTP or IPC path). + private_key: Private key of the account to use for transactions. + storage_contract_address: Address of the deployed Storage contract. + access_contract_address: Optional address of the AccessManager contract. + """ + if not dial_uri: + raise ValueError("Dial URI must not be empty.") + if not private_key: + raise ValueError("Private key must not be empty.") + if not storage_contract_address: + raise ValueError("Storage contract address must not be empty.") + if access_contract_address and not isinstance(access_contract_address, str): + raise ValueError("Access contract address must be a string if provided.") self.dial_uri = dial_uri self.private_key = private_key self.storage_contract_address = storage_contract_address self.access_contract_address = access_contract_address @staticmethod - def default(): + def default() -> 'Config': return Config(dial_uri="", private_key="", storage_contract_address="", access_contract_address="") class Client: """Represents the Ethereum storage client.""" - def __init__(self, web3: Web3, auth: LocalAccount, storage: StorageContract, access_manager: Optional[AccessManagerContract] = None): - self.web3 = web3 - self.auth = auth - self.storage = storage - self.access_manager = access_manager + def __init__(self, web3: Web3, auth: LocalAccount, storage: StorageContract, access_manager: Optional[AccessManagerContract] = None) -> None: + """ + Initializes the client with the given Web3 instance, account, and contracts. 
+ Args: + web3: Web3 instance connected to the Ethereum node. + auth: Local account used for signing transactions. + storage: Storage contract instance. + access_manager: Optional AccessManager contract instance. + """ + if not isinstance(web3, Web3): + raise TypeError("web3 must be an instance of web3.Web3") + if not isinstance(auth, LocalAccount): + raise TypeError("auth must be an instance of eth_account.LocalAccount") + if not isinstance(storage, StorageContract): + raise TypeError("storage must be an instance of StorageContract") + if access_manager is not None and not isinstance(access_manager, AccessManagerContract): + raise TypeError("access_manager must be an instance of AccessManagerContract or None") + self.web3: Web3 = web3 + self.auth: LocalAccount = auth + self.storage: StorageContract = storage + self.access_manager: Optional[AccessManagerContract] = access_manager # self.ticker = 0.2 # 200ms polling interval (currently unused) @classmethod @@ -57,29 +92,28 @@ def dial(cls, config: Config) -> 'Client': raise ValueError(f"Invalid private key: {e}") from e # Initialize contracts - storage = StorageContract(web3, config.storage_contract_address) + storage_addr = cast(HexAddress, config.storage_contract_address) + storage = StorageContract(web3, storage_addr) access_manager = None if config.access_contract_address: - access_manager = AccessManagerContract(web3, config.access_contract_address) + access_addr = cast(HexAddress, config.access_contract_address) + access_manager = AccessManagerContract(web3, access_addr) return cls(web3, account, storage, access_manager) @staticmethod - def _wait_for_tx_receipt(web3_instance: Web3, tx_hash: str, timeout: int = 120, poll_latency: float = 0.5): + def _wait_for_tx_receipt(web3_instance: Web3, tx_hash: str, timeout: int = 120, poll_latency: float = 0.5) -> TxReceipt: """Waits for a transaction receipt and raises an error if it failed.""" try: - receipt = web3_instance.eth.wait_for_transaction_receipt( - tx_hash, 
timeout=timeout, poll_latency=poll_latency - ) - if receipt.status == 0: - # Consider adding more details from the receipt if available + receipt: TxReceipt = web3_instance.eth.wait_for_transaction_receipt(cast(HexStr, tx_hash), timeout=timeout, poll_latency=poll_latency) + if receipt['status'] == 0: raise TransactionFailedError(f"Transaction {tx_hash} failed.") return receipt - except Exception as e: # Catch specific web3 exceptions if needed + except Exception as e: raise TimeoutError(f"Timeout waiting for transaction {tx_hash}") from e @classmethod - def deploy_storage(cls, config: Config): + def deploy_storage(cls, config: Config) -> Tuple['Client', HexAddress, HexAddress]: """Deploys Storage and AccessManager contracts. Requires ABI and Bytecode to be available. @@ -95,7 +129,7 @@ def deploy_storage(cls, config: Config): web3.middleware_onion.inject(geth_poa_middleware, layer=0) try: - account = Account.from_key(config.private_key) + account: LocalAccount = Account.from_key(config.private_key) except ValueError as e: raise ValueError(f"Invalid private key: {e}") from e @@ -103,7 +137,10 @@ def deploy_storage(cls, config: Config): # Ensure these are correctly imported or defined try: # This assumes you have ABI/Bytecode defined in your contracts package/module - from .contracts import storage_abi, storage_bytecode, access_manager_abi, access_manager_bytecode + from .contracts import ( + storage_abi, storage_bytecode, + access_manager_abi, access_manager_bytecode + ) except ImportError: raise ImportError("Storage/AccessManager ABI and Bytecode not found. 
Ensure they are defined in akavesdk-py/private/ipc/contracts.") @@ -121,8 +158,10 @@ def deploy_storage(cls, config: Config): signed_tx = account.sign_transaction(construct_txn) tx_hash = web3.eth.send_raw_transaction(signed_tx.rawTransaction) print(f"Storage deployment transaction sent: {tx_hash.hex()}") - storage_receipt = cls._wait_for_tx_receipt(web3, tx_hash) - storage_address = storage_receipt.contractAddress + storage_receipt = cls._wait_for_tx_receipt(web3, cast(HexStr, tx_hash.hex())) + storage_address = storage_receipt['contractAddress'] + if storage_address is None: + raise TransactionFailedError(f"Storage contract deployment failed, no contract address found in receipt: {tx_hash.hex()}") print(f"Storage contract deployed at: {storage_address}") # Deploy Access Manager Contract @@ -139,13 +178,20 @@ def deploy_storage(cls, config: Config): signed_tx = account.sign_transaction(construct_txn) tx_hash = web3.eth.send_raw_transaction(signed_tx.rawTransaction) print(f"AccessManager deployment transaction sent: {tx_hash.hex()}") - access_manager_receipt = cls._wait_for_tx_receipt(web3, tx_hash) - access_manager_address = access_manager_receipt.contractAddress + access_manager_receipt = cls._wait_for_tx_receipt(web3, cast(HexStr, tx_hash.hex())) + access_manager_address = access_manager_receipt['contractAddress'] + if access_manager_address is None: + raise TransactionFailedError(f"AccessManager contract deployment failed, no contract address found in receipt: {tx_hash.hex()}") print(f"AccessManager contract deployed at: {access_manager_address}") # Create contract instances for the client - storage_instance = StorageContract(web3, storage_address) - access_manager_instance = AccessManagerContract(web3, access_manager_address) + checksum_storage_address = web3.to_checksum_address(storage_address) # storage_address is now guaranteed to be non-None + checksum_access_manager_address = web3.to_checksum_address(access_manager_address) + + storage_instance = 
StorageContract(web3, checksum_storage_address) + access_manager_instance = AccessManagerContract(web3, checksum_access_manager_address) + + # Update config with deployed addresses if needed, or return them separately # Update config with deployed addresses if needed, or return them separately # config.storage_contract_address = storage_address diff --git a/private/ipc/contracts/__init__.py b/private/ipc/contracts/__init__.py index c96c1e8..23be0c1 100644 --- a/private/ipc/contracts/__init__.py +++ b/private/ipc/contracts/__init__.py @@ -12,6 +12,6 @@ # Example placeholder: # storage_abi = [...] -# storage_bytecode = "0x..." +# storage_bytecode = "0x..." # access_manager_abi = [...] # access_manager_bytecode = "0x..." \ No newline at end of file diff --git a/private/ipc/contracts/access_manager.py b/private/ipc/contracts/access_manager.py index e35321d..1e6e1dd 100644 --- a/private/ipc/contracts/access_manager.py +++ b/private/ipc/contracts/access_manager.py @@ -1,7 +1,8 @@ -from typing import List, Tuple, Optional -from eth_typing import HexAddress, HexStr +from typing import List, Tuple, Optional, Dict, Any, cast +from eth_typing import HexAddress, HexStr, ChecksumAddress from web3 import Web3 -from web3.contract import Contract +from web3.types import TxParams, TxReceipt +from web3.contract import Contract # type: ignore[import-untyped] import json class AccessManagerContract: @@ -16,6 +17,9 @@ def __init__(self, web3: Web3, contract_address: HexAddress): """ self.web3 = web3 self.contract_address = contract_address + self.checksum_address: ChecksumAddress = self.web3.to_checksum_address(self.contract_address) + if not self.web3.is_checksum_address(self.checksum_address): + raise ValueError(f"Invalid contract address: {contract_address}. 
Must be a valid checksum address.") # Contract ABI from the Go bindings self.abi = [ @@ -123,8 +127,8 @@ def __init__(self, web3: Web3, contract_address: HexAddress): "type": "function" } ] - - self.contract = web3.eth.contract(address=contract_address, abi=self.abi) + + self.contract: Contract = web3.eth.contract(address=self.checksum_address, abi=self.abi) def change_public_access(self, file_id: bytes, is_public: bool, from_address: HexAddress) -> None: """Changes the public access status of a file. @@ -134,7 +138,8 @@ def change_public_access(self, file_id: bytes, is_public: bool, from_address: He is_public: Whether the file should be publicly accessible from_address: Address changing the access """ - tx_hash = self.contract.functions.changePublicAccess(file_id, is_public).transact({'from': from_address}) + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) + tx_hash = self.contract.functions.changePublicAccess(file_id, is_public).transact({'from': checksum_from_address}) self.web3.eth.wait_for_transaction_receipt(tx_hash) def get_file_access_info(self, file_id: bytes) -> Tuple[HexAddress, bool]: @@ -146,7 +151,8 @@ def get_file_access_info(self, file_id: bytes) -> Tuple[HexAddress, bool]: Returns: Tuple containing (policy contract address, is public) """ - return self.contract.functions.getFileAccessInfo(file_id).call() + result = self.contract.functions.getFileAccessInfo(file_id).call() + return cast(Tuple[HexAddress, bool], result) def get_policy(self, file_id: bytes) -> HexAddress: """Gets the policy contract address for a file. 
@@ -157,7 +163,8 @@ def get_policy(self, file_id: bytes) -> HexAddress: Returns: Address of the policy contract """ - return self.contract.functions.getPolicy(file_id).call() + result = self.contract.functions.getPolicy(file_id).call() + return cast(HexAddress, result) def set_policy(self, file_id: bytes, policy_contract: HexAddress, from_address: HexAddress) -> None: """Sets the policy contract for a file. @@ -167,7 +174,8 @@ def set_policy(self, file_id: bytes, policy_contract: HexAddress, from_address: policy_contract: Address of the policy contract from_address: Address setting the policy """ - tx_hash = self.contract.functions.setPolicy(file_id, policy_contract).transact({'from': from_address}) + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) + tx_hash = self.contract.functions.setPolicy(file_id, policy_contract).transact({'from': checksum_from_address}) self.web3.eth.wait_for_transaction_receipt(tx_hash) def get_storage_contract(self) -> HexAddress: @@ -176,4 +184,5 @@ def get_storage_contract(self) -> HexAddress: Returns: Address of the storage contract """ - return self.contract.functions.storageContract().call() + result = self.contract.functions.storageContract().call() + return cast(HexAddress, result) diff --git a/private/ipc/contracts/policy.py b/private/ipc/contracts/policy.py index e87c883..31549b3 100644 --- a/private/ipc/contracts/policy.py +++ b/private/ipc/contracts/policy.py @@ -1,7 +1,8 @@ -from typing import List, Tuple, Optional -from eth_typing import HexAddress, HexStr +from typing import List, Tuple, Optional, Dict, Any, cast +from eth_typing import HexAddress, HexStr, ChecksumAddress from web3 import Web3 -from web3.contract import Contract +from web3.types import TxParams, TxReceipt +from web3.contract import Contract # type: ignore[import-untyped] import json class PolicyContract: @@ -16,6 +17,7 @@ def __init__(self, web3: Web3, contract_address: HexAddress): """ self.web3 = web3 
self.contract_address = contract_address + self.checksum_address = web3.to_checksum_address(contract_address) # Contract ABI from the Go bindings self.abi = [ @@ -104,8 +106,8 @@ def __init__(self, web3: Web3, contract_address: HexAddress): "type": "function" } ] - - self.contract = web3.eth.contract(address=contract_address, abi=self.abi) + + self.contract: Contract = web3.eth.contract(address=self.checksum_address, abi=self.abi) def add_user_access(self, file_id: bytes, user: HexAddress, from_address: HexAddress) -> None: """Grants access to a file for a specific user. @@ -115,7 +117,10 @@ def add_user_access(self, file_id: bytes, user: HexAddress, from_address: HexAdd user: Address of the user to grant access to from_address: Address granting the access """ - tx_hash = self.contract.functions.addUserAccess(file_id, user).transact({'from': from_address}) + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) + if not self.web3.is_checksum_address(checksum_from_address): + raise ValueError(f"Invalid from_address: {from_address}") + tx_hash = self.contract.functions.addUserAccess(file_id, user).transact({'from': checksum_from_address}) self.web3.eth.wait_for_transaction_receipt(tx_hash) def get_access_manager(self) -> HexAddress: @@ -124,7 +129,8 @@ def get_access_manager(self) -> HexAddress: Returns: Address of the access manager contract """ - return self.contract.functions.accessManager().call() + result = self.contract.functions.accessManager().call() + return cast(HexAddress, result) def has_access(self, file_id: bytes, user: HexAddress) -> bool: """Checks if a user has access to a file. 
@@ -136,7 +142,8 @@ def has_access(self, file_id: bytes, user: HexAddress) -> bool: Returns: True if the user has access, False otherwise """ - return self.contract.functions.hasAccess(file_id, user).call() + result = self.contract.functions.hasAccess(file_id, user).call() + return cast(bool, result) def remove_user_access(self, file_id: bytes, user: HexAddress, from_address: HexAddress) -> None: """Revokes access to a file for a specific user. @@ -146,5 +153,6 @@ def remove_user_access(self, file_id: bytes, user: HexAddress, from_address: Hex user: Address of the user to revoke access from from_address: Address revoking the access """ - tx_hash = self.contract.functions.removeUserAccess(file_id, user).transact({'from': from_address}) - self.web3.eth.wait_for_transaction_receipt(tx_hash) \ No newline at end of file + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) + tx_hash = self.contract.functions.removeUserAccess(file_id, user).transact({'from': checksum_from_address}) + self.web3.eth.wait_for_transaction_receipt(tx_hash) \ No newline at end of file diff --git a/private/ipc/contracts/storage.py b/private/ipc/contracts/storage.py index b03a27f..b9e3567 100644 --- a/private/ipc/contracts/storage.py +++ b/private/ipc/contracts/storage.py @@ -1,7 +1,8 @@ -from typing import List, Tuple, Optional -from eth_typing import HexAddress, HexStr +from typing import List, Tuple, Optional, cast +from eth_typing import HexAddress, HexStr, ChecksumAddress from web3 import Web3 -from web3.contract import Contract +from web3.types import TxParams, TxReceipt +from web3.contract import Contract # type: ignore[import-untyped] from eth_account import Account import json @@ -17,6 +18,9 @@ def __init__(self, web3: Web3, contract_address: HexAddress): """ self.web3 = web3 self.contract_address = contract_address + + # Convert to checksum address for web3.py contract interaction + self.checksum_address: ChecksumAddress = 
self.web3.to_checksum_address(self.contract_address) # Contract ABI from the Go bindings self.abi = [ @@ -240,7 +244,7 @@ def __init__(self, web3: Web3, contract_address: HexAddress): } ] - self.contract = web3.eth.contract(address=contract_address, abi=self.abi) + self.contract: Contract = web3.eth.contract(address=self.checksum_address, abi=self.abi) def get_access_manager(self) -> HexAddress: """Gets the address of the associated access manager contract. @@ -248,9 +252,10 @@ def get_access_manager(self) -> HexAddress: Returns: Address of the access manager contract """ - return self.contract.functions.accessManager().call() + result = self.contract.functions.accessManager().call() + return cast(HexAddress, result) - def create_bucket(self, bucket_name: str, from_address: HexAddress, private_key: str, gas_limit: int = None) -> HexStr: + def create_bucket(self, bucket_name: str, from_address: HexAddress, private_key: str, gas_limit: Optional[int] = None) -> HexStr: """Creates a new bucket. 
Args: @@ -263,10 +268,11 @@ def create_bucket(self, bucket_name: str, from_address: HexAddress, private_key: Transaction hash of the create operation """ # Build transaction - tx_params = { - 'from': from_address, + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) + tx_params: TxParams = { + 'from': checksum_from_address, 'gasPrice': self.web3.eth.gas_price, - 'nonce': self.web3.eth.get_transaction_count(from_address) + 'nonce': self.web3.eth.get_transaction_count(checksum_from_address) } if gas_limit: @@ -283,8 +289,8 @@ def create_bucket(self, bucket_name: str, from_address: HexAddress, private_key: tx_hash = self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) # Wait for receipt - receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) - if receipt.status != 1: + receipt: TxReceipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) + if receipt['status'] != 1: # Get revert reason if possible try: self.contract.functions.createBucket(bucket_name).call({ @@ -294,7 +300,7 @@ def create_bucket(self, bucket_name: str, from_address: HexAddress, private_key: raise Exception(f"Transaction reverted: {str(e)}") raise Exception(f"Transaction failed. Receipt: {receipt}") - return tx_hash.hex() + return cast(HexStr, tx_hash.hex()) def create_file(self, bucket_name: str, file_name: str, file_id: bytes, size: int, from_address: HexAddress, private_key: str) -> None: """Creates a new file entry. 
@@ -308,11 +314,12 @@ def create_file(self, bucket_name: str, file_name: str, file_id: bytes, size: in private_key: Private key for signing the transaction """ # Build transaction + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) tx = self.contract.functions.createFile(bucket_name, file_name, file_id, size).build_transaction({ - 'from': from_address, + 'from': checksum_from_address, 'gas': 500000, # Gas limit 'gasPrice': self.web3.eth.gas_price, - 'nonce': self.web3.eth.get_transaction_count(from_address) + 'nonce': self.web3.eth.get_transaction_count(checksum_from_address) }) # Sign transaction @@ -322,8 +329,8 @@ def create_file(self, bucket_name: str, file_name: str, file_id: bytes, size: in tx_hash = self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) # Wait for receipt - receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) - if receipt.status != 1: + receipt: TxReceipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) + if receipt['status'] != 1: raise Exception("Transaction failed") def commit_file(self, bucket_name: str, file_name: str, size: int, root_cid: bytes, from_address: HexAddress, private_key: str) -> None: @@ -341,17 +348,19 @@ def commit_file(self, bucket_name: str, file_name: str, size: int, root_cid: byt # Adding 'commitFile' based on Go SDK patterns function_name = 'commitFile' # Adjust if contract ABI uses a different name + try: contract_function = getattr(self.contract.functions, function_name) except AttributeError: raise NotImplementedError(f"Contract function '{function_name}' not found in ABI") # Build transaction + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) tx = contract_function(bucket_name, file_name, size, root_cid).build_transaction({ - 'from': from_address, + 'from': checksum_from_address, 'gas': 500000, # Gas limit (adjust as needed) 'gasPrice': self.web3.eth.gas_price, - 'nonce': self.web3.eth.get_transaction_count(from_address) + 
'nonce': self.web3.eth.get_transaction_count(checksum_from_address) }) # Sign transaction @@ -361,11 +370,11 @@ def commit_file(self, bucket_name: str, file_name: str, size: int, root_cid: byt tx_hash = self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) # Wait for receipt - receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) - if receipt.status != 1: + receipt: TxReceipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) + if receipt['status'] != 1: raise Exception(f"Transaction failed for {function_name}") - def delete_bucket(self, bucket_name: str, from_address: HexAddress, private_key: str, bucket_id_hex: str = None) -> HexStr: + def delete_bucket(self, bucket_name: str, from_address: HexAddress, private_key: str, bucket_id_hex: Optional[str] = None) -> HexStr: """Deletes a bucket. Args: @@ -407,11 +416,12 @@ def delete_bucket(self, bucket_name: str, from_address: HexAddress, private_key: raise Exception(f"Failed to prepare bucket deletion: {str(e)}") # Build transaction parameters - use standard legacy transaction - tx_params = { - 'from': from_address, + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) + tx_params: TxParams = { + 'from': checksum_from_address, 'gas': 500000, # Gas limit 'gasPrice': self.web3.eth.gas_price, - 'nonce': self.web3.eth.get_transaction_count(from_address), + 'nonce': self.web3.eth.get_transaction_count(checksum_from_address), } try: @@ -448,7 +458,7 @@ def delete_bucket(self, bucket_name: str, from_address: HexAddress, private_key: elif error_data.startswith('0x08c379a0'): # Standard revert reason try: - from eth_abi import decode_single + from eth_abi import decode as decode_single # type: ignore[attr-defined] reason = decode_single('string', bytes.fromhex(error_data[10:])) print(f"Revert reason: {reason}") except: @@ -473,13 +483,13 @@ def delete_bucket(self, bucket_name: str, from_address: HexAddress, private_key: print(f"Transaction sent: {tx_hash.hex()}") # Wait for 
receipt - receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) - print(f"Transaction receipt: status={receipt.status}, gasUsed={receipt.gasUsed}") - - if receipt.status != 1: - raise Exception(f"Transaction failed with status: {receipt.status}") - - return tx_hash.hex() + receipt: TxReceipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) + print(f"Transaction receipt: status={receipt['status']}, gasUsed={receipt['gasUsed']}") + + if receipt['status'] != 1: + raise Exception(f"Transaction failed with status: {receipt['status']}") + + return cast(HexStr, tx_hash.hex()) except Exception as e: raise Exception(f"Failed to delete bucket: {str(e)}") @@ -494,11 +504,12 @@ def delete_file(self, bucket_name: str, file_name: str, from_address: HexAddress private_key: Private key for signing the transaction """ # Build transaction + checksum_from_address: ChecksumAddress = self.web3.to_checksum_address(from_address) tx = self.contract.functions.deleteFile(bucket_name, file_name).build_transaction({ - 'from': from_address, + 'from': checksum_from_address, 'gas': 500000, # Gas limit 'gasPrice': self.web3.eth.gas_price, - 'nonce': self.web3.eth.get_transaction_count(from_address) + 'nonce': self.web3.eth.get_transaction_count(checksum_from_address) }) # Sign transaction @@ -508,8 +519,8 @@ def delete_file(self, bucket_name: str, file_name: str, from_address: HexAddress tx_hash = self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) # Wait for receipt - receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) - if receipt.status != 1: + receipt: TxReceipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) + if receipt['status'] != 1: raise Exception("Transaction failed") def get_bucket(self, bucket_name: str) -> Tuple[str, int, HexAddress]: @@ -521,7 +532,8 @@ def get_bucket(self, bucket_name: str) -> Tuple[str, int, HexAddress]: Returns: Tuple containing (bucket_name, created_at_timestamp, owner_address) """ - return 
self.contract.functions.getBucket(bucket_name).call() + result = self.contract.functions.getBucket(bucket_name).call() + return cast(Tuple[str, int, HexAddress], result) def get_file(self, bucket_name: str, file_name: str) -> Tuple[str, bytes, int, int]: """Gets file information. @@ -533,4 +545,5 @@ def get_file(self, bucket_name: str, file_name: str) -> Tuple[str, bytes, int, i Returns: Tuple containing (file_name, file_id, size, created_at_timestamp) """ - return self.contract.functions.getFile(bucket_name, file_name).call() + result = self.contract.functions.getFile(bucket_name, file_name).call() + return cast(Tuple[str, bytes, int, int], result) diff --git a/private/memory/__init__.py b/private/memory/__init__.py index e69de29..78c9133 100644 --- a/private/memory/__init__.py +++ b/private/memory/__init__.py @@ -0,0 +1,4 @@ +from .memory import Size + + +__all__ = ["Size"] diff --git a/private/pb/ipcnodeapi_pb2.pyi b/private/pb/ipcnodeapi_pb2.pyi new file mode 100644 index 0000000..912aaf5 --- /dev/null +++ b/private/pb/ipcnodeapi_pb2.pyi @@ -0,0 +1,648 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import typing + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class ConnectionParamsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +global___ConnectionParamsRequest = ConnectionParamsRequest + +@typing.final +class ConnectionParamsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DIAL_URI_FIELD_NUMBER: builtins.int + CONTRACT_ADDRESS_FIELD_NUMBER: builtins.int + dial_uri: builtins.str + contract_address: builtins.str + def __init__( + self, + *, + dial_uri: builtins.str = ..., + contract_address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["contract_address", b"contract_address", "dial_uri", b"dial_uri"]) -> None: ... + +global___ConnectionParamsResponse = ConnectionParamsResponse + +@typing.final +class IPCBucketCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +global___IPCBucketCreateRequest = IPCBucketCreateRequest + +@typing.final +class IPCBucketCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + name: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "name", b"name"]) -> None: ... 
+ +global___IPCBucketCreateResponse = IPCBucketCreateResponse + +@typing.final +class IPCBucketViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + name: builtins.str + address: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address", "name", b"name"]) -> None: ... + +global___IPCBucketViewRequest = IPCBucketViewRequest + +@typing.final +class IPCBucketViewResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ID_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + id: builtins.str + """todo: remove this when contract removes id from deleteBucket endpoint""" + name: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + id: builtins.str = ..., + name: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "id", b"id", "name", b"name"]) -> None: ... + +global___IPCBucketViewResponse = IPCBucketViewResponse + +@typing.final +class IPCBucketListRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ADDRESS_FIELD_NUMBER: builtins.int + address: builtins.str + def __init__( + self, + *, + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address"]) -> None: ... 
+ +global___IPCBucketListRequest = IPCBucketListRequest + +@typing.final +class IPCBucketListResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class IPCBucket(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + name: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "name", b"name"]) -> None: ... + + BUCKETS_FIELD_NUMBER: builtins.int + @property + def buckets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IPCBucketListResponse.IPCBucket]: ... + def __init__( + self, + *, + buckets: collections.abc.Iterable[global___IPCBucketListResponse.IPCBucket] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["buckets", b"buckets"]) -> None: ... + +global___IPCBucketListResponse = IPCBucketListResponse + +@typing.final +class IPCBucketDeleteRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___IPCBucketDeleteRequest = IPCBucketDeleteRequest + +@typing.final +class IPCBucketDeleteResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +global___IPCBucketDeleteResponse = IPCBucketDeleteResponse + +@typing.final +class IPCChunk(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Block(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + cid: builtins.str + size: builtins.int + def __init__( + self, + *, + cid: builtins.str = ..., + size: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "size", b"size"]) -> None: ... + + CID_FIELD_NUMBER: builtins.int + INDEX_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + BLOCKS_FIELD_NUMBER: builtins.int + cid: builtins.str + index: builtins.int + size: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IPCChunk.Block]: ... + def __init__( + self, + *, + cid: builtins.str = ..., + index: builtins.int = ..., + size: builtins.int = ..., + blocks: collections.abc.Iterable[global___IPCChunk.Block] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks", "cid", b"cid", "index", b"index", "size", b"size"]) -> None: ... + +global___IPCChunk = IPCChunk + +@typing.final +class IPCFileUploadChunkCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CHUNK_FIELD_NUMBER: builtins.int + BUCKET_ID_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + bucket_id: builtins.bytes + file_name: builtins.str + @property + def chunk(self) -> global___IPCChunk: ... + def __init__( + self, + *, + chunk: global___IPCChunk | None = ..., + bucket_id: builtins.bytes = ..., + file_name: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["chunk", b"chunk"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing.Literal["bucket_id", b"bucket_id", "chunk", b"chunk", "file_name", b"file_name"]) -> None: ... + +global___IPCFileUploadChunkCreateRequest = IPCFileUploadChunkCreateRequest + +@typing.final +class IPCFileUploadChunkCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BlockUpload(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + NODE_ADDRESS_FIELD_NUMBER: builtins.int + NODE_ID_FIELD_NUMBER: builtins.int + PERMIT_FIELD_NUMBER: builtins.int + cid: builtins.str + node_address: builtins.str + node_id: builtins.str + permit: builtins.str + def __init__( + self, + *, + cid: builtins.str = ..., + node_address: builtins.str = ..., + node_id: builtins.str = ..., + permit: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "node_address", b"node_address", "node_id", b"node_id", "permit", b"permit"]) -> None: ... + + BLOCKS_FIELD_NUMBER: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IPCFileUploadChunkCreateResponse.BlockUpload]: ... + def __init__( + self, + *, + blocks: collections.abc.Iterable[global___IPCFileUploadChunkCreateResponse.BlockUpload] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks"]) -> None: ... + +global___IPCFileUploadChunkCreateResponse = IPCFileUploadChunkCreateResponse + +@typing.final +class IPCFileBlockDataRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TRANSACTION_FIELD_NUMBER: builtins.int + @property + def transaction(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... + def __init__( + self, + *, + transaction: collections.abc.Iterable[builtins.bytes] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["transaction", b"transaction"]) -> None: ... + +global___IPCFileBlockDataRequest = IPCFileBlockDataRequest + +@typing.final +class IPCFileBlockData(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_FIELD_NUMBER: builtins.int + CID_FIELD_NUMBER: builtins.int + INDEX_FIELD_NUMBER: builtins.int + CHUNK_FIELD_NUMBER: builtins.int + BUCKET_ID_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + data: builtins.bytes + cid: builtins.str + """only present in first msg of request stream""" + index: builtins.int + """only present in first msg of request stream""" + bucket_id: builtins.bytes + file_name: builtins.str + @property + def chunk(self) -> global___IPCChunk: + """only present in first msg of request stream""" + + def __init__( + self, + *, + data: builtins.bytes = ..., + cid: builtins.str = ..., + index: builtins.int = ..., + chunk: global___IPCChunk | None = ..., + bucket_id: builtins.bytes = ..., + file_name: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["chunk", b"chunk"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["bucket_id", b"bucket_id", "chunk", b"chunk", "cid", b"cid", "data", b"data", "file_name", b"file_name", "index", b"index"]) -> None: ... + +global___IPCFileBlockData = IPCFileBlockData + +@typing.final +class IPCFileUploadBlockResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +global___IPCFileUploadBlockResponse = IPCFileUploadBlockResponse + +@typing.final +class IPCFileViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + address: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address", "bucket_name", b"bucket_name", "file_name", b"file_name"]) -> None: ... + +global___IPCFileViewRequest = IPCFileViewRequest + +@typing.final +class IPCFileViewResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + ROOT_CID_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + root_cid: builtins.str + encoded_size: builtins.int + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + root_cid: builtins.str = ..., + encoded_size: builtins.int = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "created_at", b"created_at", "encoded_size", b"encoded_size", "file_name", b"file_name", "root_cid", b"root_cid"]) -> None: ... 
+ +global___IPCFileViewResponse = IPCFileViewResponse + +@typing.final +class IPCFileDownloadCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + address: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address", "bucket_name", b"bucket_name", "file_name", b"file_name"]) -> None: ... + +global___IPCFileDownloadCreateRequest = IPCFileDownloadCreateRequest + +@typing.final +class IPCFileDownloadCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Chunk(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + cid: builtins.str + encoded_size: builtins.int + size: builtins.int + def __init__( + self, + *, + cid: builtins.str = ..., + encoded_size: builtins.int = ..., + size: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "encoded_size", b"encoded_size", "size", b"size"]) -> None: ... + + BUCKET_NAME_FIELD_NUMBER: builtins.int + CHUNKS_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + @property + def chunks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IPCFileDownloadCreateResponse.Chunk]: ... + def __init__( + self, + *, + bucket_name: builtins.str = ..., + chunks: collections.abc.Iterable[global___IPCFileDownloadCreateResponse.Chunk] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "chunks", b"chunks"]) -> None: ... + +global___IPCFileDownloadCreateResponse = IPCFileDownloadCreateResponse + +@typing.final +class IPCFileDownloadChunkCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + CHUNK_CID_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + chunk_cid: builtins.str + address: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + chunk_cid: builtins.str = ..., + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address", "bucket_name", b"bucket_name", "chunk_cid", b"chunk_cid", "file_name", b"file_name"]) -> None: ... + +global___IPCFileDownloadChunkCreateRequest = IPCFileDownloadChunkCreateRequest + +@typing.final +class IPCFileDownloadChunkCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BlockDownload(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + NODE_ADDRESS_FIELD_NUMBER: builtins.int + NODE_ID_FIELD_NUMBER: builtins.int + PERMIT_FIELD_NUMBER: builtins.int + cid: builtins.str + size: builtins.int + node_address: builtins.str + node_id: builtins.str + permit: builtins.str + def __init__( + self, + *, + cid: builtins.str = ..., + size: builtins.int = ..., + node_address: builtins.str = ..., + node_id: builtins.str = ..., + permit: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "node_address", b"node_address", "node_id", b"node_id", "permit", b"permit", "size", b"size"]) -> None: ... 
+ + BLOCKS_FIELD_NUMBER: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IPCFileDownloadChunkCreateResponse.BlockDownload]: ... + def __init__( + self, + *, + blocks: collections.abc.Iterable[global___IPCFileDownloadChunkCreateResponse.BlockDownload] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks"]) -> None: ... + +global___IPCFileDownloadChunkCreateResponse = IPCFileDownloadChunkCreateResponse + +@typing.final +class IPCFileDownloadBlockRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CHUNK_CID_FIELD_NUMBER: builtins.int + CHUNK_INDEX_FIELD_NUMBER: builtins.int + BLOCK_CID_FIELD_NUMBER: builtins.int + BLOCK_INDEX_FIELD_NUMBER: builtins.int + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + chunk_cid: builtins.str + chunk_index: builtins.int + block_cid: builtins.str + block_index: builtins.int + bucket_name: builtins.str + file_name: builtins.str + address: builtins.str + def __init__( + self, + *, + chunk_cid: builtins.str = ..., + chunk_index: builtins.int = ..., + block_cid: builtins.str = ..., + block_index: builtins.int = ..., + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address", "block_cid", b"block_cid", "block_index", b"block_index", "bucket_name", b"bucket_name", "chunk_cid", b"chunk_cid", "chunk_index", b"chunk_index", "file_name", b"file_name"]) -> None: ... 
+ +global___IPCFileDownloadBlockRequest = IPCFileDownloadBlockRequest + +@typing.final +class IPCFileListRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + address: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["address", b"address", "bucket_name", b"bucket_name"]) -> None: ... + +global___IPCFileListRequest = IPCFileListRequest + +@typing.final +class IPCFileListResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class IPCFile(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROOT_CID_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + root_cid: builtins.str + name: builtins.str + encoded_size: builtins.int + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + root_cid: builtins.str = ..., + name: builtins.str = ..., + encoded_size: builtins.int = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "encoded_size", b"encoded_size", "name", b"name", "root_cid", b"root_cid"]) -> None: ... + + LIST_FIELD_NUMBER: builtins.int + @property + def list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IPCFileListResponse.IPCFile]: ... + def __init__( + self, + *, + list: collections.abc.Iterable[global___IPCFileListResponse.IPCFile] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["list", b"list"]) -> None: ... + +global___IPCFileListResponse = IPCFileListResponse + +@typing.final +class IPCFileDeleteRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TRANSACTION_FIELD_NUMBER: builtins.int + BUCKET_NAME_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + transaction: builtins.bytes + bucket_name: builtins.bytes + name: builtins.str + def __init__( + self, + *, + transaction: builtins.bytes = ..., + bucket_name: builtins.bytes = ..., + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "name", b"name", "transaction", b"transaction"]) -> None: ... + +global___IPCFileDeleteRequest = IPCFileDeleteRequest + +@typing.final +class IPCFileDeleteResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___IPCFileDeleteResponse = IPCFileDeleteResponse diff --git a/private/pb/ipcnodeapi_pb2_grpc.py b/private/pb/ipcnodeapi_pb2_grpc.py index 9136d02..a99d193 100644 --- a/private/pb/ipcnodeapi_pb2_grpc.py +++ b/private/pb/ipcnodeapi_pb2_grpc.py @@ -3,7 +3,7 @@ import grpc import warnings -from private.pb import ipcnodeapi_pb2 as ipcnodeapi__pb2 +import ipcnodeapi_pb2 as ipcnodeapi__pb2 GRPC_GENERATED_VERSION = '1.71.0' GRPC_VERSION = grpc.__version__ diff --git a/private/pb/nodeapi_pb2.py b/private/pb/nodeapi_pb2.py index c78626e..95e7bb1 100644 --- a/private/pb/nodeapi_pb2.py +++ b/private/pb/nodeapi_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE -# source: nodeapi.proto +# source: private/pb/nodeapi.proto # Protobuf Python Version: 5.29.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor @@ -15,7 +15,7 @@ 29, 0, '', - 'nodeapi.proto' + 'private/pb/nodeapi.proto' ) # @@protoc_insertion_point(imports) @@ -25,100 +25,100 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rnodeapi.proto\x12\x07nodeapi\x1a\x1fgoogle/protobuf/timestamp.proto\"#\n\x13\x42ucketCreateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"T\n\x14\x42ucketCreateResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"(\n\x11\x42ucketViewRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\"R\n\x12\x42ucketViewResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11\x42ucketListRequest\"\x91\x01\n\x12\x42ucketListResponse\x12\x33\n\x07\x62uckets\x18\x01 \x03(\x0b\x32\".nodeapi.BucketListResponse.Bucket\x1a\x46\n\x06\x42ucket\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"#\n\x13\x42ucketDeleteRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x16\n\x14\x42ucketDeleteResponse\"*\n\rFileBlockData\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0b\n\x03\x63id\x18\x02 \x01(\t\"G\n\x1dStreamFileUploadCreateRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"\x8b\x01\n\x1eStreamFileUploadCreateResponse\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x11\n\tstream_id\x18\x03 \x01(\t\x12.\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x8e\x01\n\x05\x43hunk\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x0b\n\x03\x63id\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x03\x12\x0c\n\x04size\x18\x04 
\x01(\x03\x12$\n\x06\x62locks\x18\x05 \x03(\x0b\x32\x14.nodeapi.Chunk.Block\x1a\"\n\x05\x42lock\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\"C\n\"StreamFileUploadChunkCreateRequest\x12\x1d\n\x05\x63hunk\x18\x01 \x01(\x0b\x32\x0e.nodeapi.Chunk\"\xc2\x01\n#StreamFileUploadChunkCreateResponse\x12H\n\x06\x62locks\x18\x01 \x03(\x0b\x32\x38.nodeapi.StreamFileUploadChunkCreateResponse.BlockUpload\x1aQ\n\x0b\x42lockUpload\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x14\n\x0cnode_address\x18\x02 \x01(\t\x12\x0f\n\x07node_id\x18\x03 \x01(\t\x12\x0e\n\x06permit\x18\x04 \x01(\t\"^\n\x13StreamFileBlockData\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0b\n\x03\x63id\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x03\x12\x1d\n\x05\x63hunk\x18\x04 \x01(\x0b\x32\x0e.nodeapi.Chunk\"\x1f\n\x1dStreamFileUploadBlockResponse\"Y\n\x1dStreamFileUploadCommitRequest\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x10\n\x08root_cid\x18\x02 \x01(\t\x12\x13\n\x0b\x63hunk_count\x18\x03 \x01(\x03\"\xb1\x01\n\x1eStreamFileUploadCommitResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x13\n\x0b\x62ucket_name\x18\x03 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x04 \x01(\x03\x12\x0c\n\x04size\x18\x05 \x01(\x03\x12\x30\n\x0c\x63ommitted_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"[\n\x1fStreamFileDownloadCreateRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x10\n\x08root_cid\x18\x03 \x01(\t\"\xc5\x01\n StreamFileDownloadCreateResponse\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12?\n\x06\x63hunks\x18\x03 \x03(\x0b\x32/.nodeapi.StreamFileDownloadCreateResponse.Chunk\x1a\x38\n\x05\x43hunk\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x02 \x01(\x03\x12\x0c\n\x04size\x18\x03 \x01(\x03\"v\n$StreamFileDownloadRangeCreateRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x13\n\x0bstart_index\x18\x03 
\x01(\x03\x12\x11\n\tend_index\x18\x04 \x01(\x03\"L\n$StreamFileDownloadChunkCreateRequest\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x11\n\tchunk_cid\x18\x02 \x01(\t\"\xd8\x01\n%StreamFileDownloadChunkCreateResponse\x12L\n\x06\x62locks\x18\x01 \x03(\x0b\x32<.nodeapi.StreamFileDownloadChunkCreateResponse.BlockDownload\x1a\x61\n\rBlockDownload\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12\x14\n\x0cnode_address\x18\x03 \x01(\t\x12\x0f\n\x07node_id\x18\x04 \x01(\t\x12\x0e\n\x06permit\x18\x05 \x01(\t\"\xa6\x03\n\'StreamFileDownloadChunkCreateResponseV2\x12N\n\x06\x62locks\x18\x01 \x03(\x0b\x32>.nodeapi.StreamFileDownloadChunkCreateResponseV2.BlockDownload\x1a\xaa\x02\n\rBlockDownload\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12S\n\x05\x61kave\x18\x03 \x01(\x0b\x32\x44.nodeapi.StreamFileDownloadChunkCreateResponseV2.BlockDownload.Akave\x12Y\n\x08\x66ilecoin\x18\x04 \x01(\x0b\x32G.nodeapi.StreamFileDownloadChunkCreateResponseV2.BlockDownload.Filecoin\x1a.\n\x05\x41kave\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x14\n\x0cnode_address\x18\x02 \x01(\t\x1a\x1e\n\x08\x46ilecoin\x12\x12\n\nsp_address\x18\x01 \x01(\t\"\x83\x01\n\x1eStreamFileDownloadBlockRequest\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x11\n\tchunk_cid\x18\x02 \x01(\t\x12\x13\n\x0b\x63hunk_index\x18\x03 \x01(\x03\x12\x11\n\tblock_cid\x18\x04 \x01(\t\x12\x13\n\x0b\x62lock_index\x18\x05 \x01(\x03\",\n\x15StreamFileListRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\"\xbe\x01\n\x04\x46ile\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x10\n\x08root_cid\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x04 \x01(\x03\x12\x0c\n\x04size\x18\x05 \x01(\x03\x12.\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63ommited_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"6\n\x16StreamFileListResponse\x12\x1c\n\x05\x66iles\x18\x01 
\x03(\x0b\x32\r.nodeapi.File\"?\n\x15StreamFileViewRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"\xeb\x01\n\x16StreamFileViewResponse\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x11\n\tstream_id\x18\x03 \x01(\t\x12\x10\n\x08root_cid\x18\x04 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x05 \x01(\x03\x12\x0c\n\x04size\x18\x06 \x01(\x03\x12.\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0c\x63ommitted_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"A\n\x17StreamFileDeleteRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"\x1a\n\x18StreamFileDeleteResponse\"G\n\x1dStreamFileListVersionsRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"A\n\x1eStreamFileListVersionsResponse\x12\x1f\n\x08versions\x18\x01 \x03(\x0b\x32\r.nodeapi.File2\xb9\x02\n\x07NodeAPI\x12M\n\x0c\x42ucketCreate\x12\x1c.nodeapi.BucketCreateRequest\x1a\x1d.nodeapi.BucketCreateResponse\"\x00\x12G\n\nBucketView\x12\x1a.nodeapi.BucketViewRequest\x1a\x1b.nodeapi.BucketViewResponse\"\x00\x12G\n\nBucketList\x12\x1a.nodeapi.BucketListRequest\x1a\x1b.nodeapi.BucketListResponse\"\x00\x12M\n\x0c\x42ucketDelete\x12\x1c.nodeapi.BucketDeleteRequest\x1a\x1d.nodeapi.BucketDeleteResponse\"\x00\x32\xc2\n\n\tStreamAPI\x12\x65\n\x10\x46ileUploadCreate\x12&.nodeapi.StreamFileUploadCreateRequest\x1a\'.nodeapi.StreamFileUploadCreateResponse\"\x00\x12t\n\x15\x46ileUploadChunkCreate\x12+.nodeapi.StreamFileUploadChunkCreateRequest\x1a,.nodeapi.StreamFileUploadChunkCreateResponse\"\x00\x12[\n\x0f\x46ileUploadBlock\x12\x1c.nodeapi.StreamFileBlockData\x1a&.nodeapi.StreamFileUploadBlockResponse\"\x00(\x01\x12\x65\n\x10\x46ileUploadCommit\x12&.nodeapi.StreamFileUploadCommitRequest\x1a\'.nodeapi.StreamFileUploadCommitResponse\"\x00\x12k\n\x12\x46ileDownloadCreate\x12(.nodeapi.StreamFileDownloadCreateRequest\x1a).nodeapi.StreamFileDownloadCre
ateResponse\"\x00\x12u\n\x17\x46ileDownloadRangeCreate\x12-.nodeapi.StreamFileDownloadRangeCreateRequest\x1a).nodeapi.StreamFileDownloadCreateResponse\"\x00\x12z\n\x17\x46ileDownloadChunkCreate\x12-.nodeapi.StreamFileDownloadChunkCreateRequest\x1a..nodeapi.StreamFileDownloadChunkCreateResponse\"\x00\x12~\n\x19\x46ileDownloadChunkCreateV2\x12-.nodeapi.StreamFileDownloadChunkCreateRequest\x1a\x30.nodeapi.StreamFileDownloadChunkCreateResponseV2\"\x00\x12^\n\x11\x46ileDownloadBlock\x12\'.nodeapi.StreamFileDownloadBlockRequest\x1a\x1c.nodeapi.StreamFileBlockData\"\x00\x30\x01\x12M\n\x08\x46ileList\x12\x1e.nodeapi.StreamFileListRequest\x1a\x1f.nodeapi.StreamFileListResponse\"\x00\x12M\n\x08\x46ileView\x12\x1e.nodeapi.StreamFileViewRequest\x1a\x1f.nodeapi.StreamFileViewResponse\"\x00\x12\x61\n\x0c\x46ileVersions\x12&.nodeapi.StreamFileListVersionsRequest\x1a\'.nodeapi.StreamFileListVersionsResponse\"\x00\x12S\n\nFileDelete\x12 .nodeapi.StreamFileDeleteRequest\x1a!.nodeapi.StreamFileDeleteResponse\"\x00\x42\x1bZ\x19\x61kave.ai/akave/private/pbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18private/pb/nodeapi.proto\x12\x07nodeapi\x1a\x1fgoogle/protobuf/timestamp.proto\"#\n\x13\x42ucketCreateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"T\n\x14\x42ucketCreateResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"(\n\x11\x42ucketViewRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\"R\n\x12\x42ucketViewResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11\x42ucketListRequest\"\x91\x01\n\x12\x42ucketListResponse\x12\x33\n\x07\x62uckets\x18\x01 \x03(\x0b\x32\".nodeapi.BucketListResponse.Bucket\x1a\x46\n\x06\x42ucket\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"#\n\x13\x42ucketDeleteRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t\"\x16\n\x14\x42ucketDeleteResponse\"*\n\rFileBlockData\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0b\n\x03\x63id\x18\x02 \x01(\t\"G\n\x1dStreamFileUploadCreateRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"\x8b\x01\n\x1eStreamFileUploadCreateResponse\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x11\n\tstream_id\x18\x03 \x01(\t\x12.\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x8e\x01\n\x05\x43hunk\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x0b\n\x03\x63id\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x03\x12\x0c\n\x04size\x18\x04 \x01(\x03\x12$\n\x06\x62locks\x18\x05 \x03(\x0b\x32\x14.nodeapi.Chunk.Block\x1a\"\n\x05\x42lock\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\"C\n\"StreamFileUploadChunkCreateRequest\x12\x1d\n\x05\x63hunk\x18\x01 \x01(\x0b\x32\x0e.nodeapi.Chunk\"\xc2\x01\n#StreamFileUploadChunkCreateResponse\x12H\n\x06\x62locks\x18\x01 \x03(\x0b\x32\x38.nodeapi.StreamFileUploadChunkCreateResponse.BlockUpload\x1aQ\n\x0b\x42lockUpload\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x14\n\x0cnode_address\x18\x02 \x01(\t\x12\x0f\n\x07node_id\x18\x03 \x01(\t\x12\x0e\n\x06permit\x18\x04 \x01(\t\"^\n\x13StreamFileBlockData\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0b\n\x03\x63id\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x03\x12\x1d\n\x05\x63hunk\x18\x04 \x01(\x0b\x32\x0e.nodeapi.Chunk\"\x1f\n\x1dStreamFileUploadBlockResponse\"Y\n\x1dStreamFileUploadCommitRequest\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x10\n\x08root_cid\x18\x02 \x01(\t\x12\x13\n\x0b\x63hunk_count\x18\x03 \x01(\x03\"\xb1\x01\n\x1eStreamFileUploadCommitResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x13\n\x0b\x62ucket_name\x18\x03 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x04 \x01(\x03\x12\x0c\n\x04size\x18\x05 \x01(\x03\x12\x30\n\x0c\x63ommitted_at\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\"[\n\x1fStreamFileDownloadCreateRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x10\n\x08root_cid\x18\x03 \x01(\t\"\xc5\x01\n StreamFileDownloadCreateResponse\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12?\n\x06\x63hunks\x18\x03 \x03(\x0b\x32/.nodeapi.StreamFileDownloadCreateResponse.Chunk\x1a\x38\n\x05\x43hunk\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x02 \x01(\x03\x12\x0c\n\x04size\x18\x03 \x01(\x03\"v\n$StreamFileDownloadRangeCreateRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x13\n\x0bstart_index\x18\x03 \x01(\x03\x12\x11\n\tend_index\x18\x04 \x01(\x03\"L\n$StreamFileDownloadChunkCreateRequest\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x11\n\tchunk_cid\x18\x02 \x01(\t\"\xd8\x01\n%StreamFileDownloadChunkCreateResponse\x12L\n\x06\x62locks\x18\x01 \x03(\x0b\x32<.nodeapi.StreamFileDownloadChunkCreateResponse.BlockDownload\x1a\x61\n\rBlockDownload\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12\x14\n\x0cnode_address\x18\x03 \x01(\t\x12\x0f\n\x07node_id\x18\x04 \x01(\t\x12\x0e\n\x06permit\x18\x05 \x01(\t\"\xa6\x03\n\'StreamFileDownloadChunkCreateResponseV2\x12N\n\x06\x62locks\x18\x01 \x03(\x0b\x32>.nodeapi.StreamFileDownloadChunkCreateResponseV2.BlockDownload\x1a\xaa\x02\n\rBlockDownload\x12\x0b\n\x03\x63id\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12S\n\x05\x61kave\x18\x03 \x01(\x0b\x32\x44.nodeapi.StreamFileDownloadChunkCreateResponseV2.BlockDownload.Akave\x12Y\n\x08\x66ilecoin\x18\x04 \x01(\x0b\x32G.nodeapi.StreamFileDownloadChunkCreateResponseV2.BlockDownload.Filecoin\x1a.\n\x05\x41kave\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x14\n\x0cnode_address\x18\x02 \x01(\t\x1a\x1e\n\x08\x46ilecoin\x12\x12\n\nsp_address\x18\x01 \x01(\t\"\x83\x01\n\x1eStreamFileDownloadBlockRequest\x12\x11\n\tstream_id\x18\x01 
\x01(\t\x12\x11\n\tchunk_cid\x18\x02 \x01(\t\x12\x13\n\x0b\x63hunk_index\x18\x03 \x01(\x03\x12\x11\n\tblock_cid\x18\x04 \x01(\t\x12\x13\n\x0b\x62lock_index\x18\x05 \x01(\x03\",\n\x15StreamFileListRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\"\xbe\x01\n\x04\x46ile\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x10\n\x08root_cid\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x04 \x01(\x03\x12\x0c\n\x04size\x18\x05 \x01(\x03\x12.\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63ommited_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"6\n\x16StreamFileListResponse\x12\x1c\n\x05\x66iles\x18\x01 \x03(\x0b\x32\r.nodeapi.File\"?\n\x15StreamFileViewRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"\xeb\x01\n\x16StreamFileViewResponse\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\x12\x11\n\tstream_id\x18\x03 \x01(\t\x12\x10\n\x08root_cid\x18\x04 \x01(\t\x12\x14\n\x0c\x65ncoded_size\x18\x05 \x01(\x03\x12\x0c\n\x04size\x18\x06 \x01(\x03\x12.\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0c\x63ommitted_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"A\n\x17StreamFileDeleteRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"\x1a\n\x18StreamFileDeleteResponse\"G\n\x1dStreamFileListVersionsRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x11\n\tfile_name\x18\x02 \x01(\t\"A\n\x1eStreamFileListVersionsResponse\x12\x1f\n\x08versions\x18\x01 
\x03(\x0b\x32\r.nodeapi.File2\xb9\x02\n\x07NodeAPI\x12M\n\x0c\x42ucketCreate\x12\x1c.nodeapi.BucketCreateRequest\x1a\x1d.nodeapi.BucketCreateResponse\"\x00\x12G\n\nBucketView\x12\x1a.nodeapi.BucketViewRequest\x1a\x1b.nodeapi.BucketViewResponse\"\x00\x12G\n\nBucketList\x12\x1a.nodeapi.BucketListRequest\x1a\x1b.nodeapi.BucketListResponse\"\x00\x12M\n\x0c\x42ucketDelete\x12\x1c.nodeapi.BucketDeleteRequest\x1a\x1d.nodeapi.BucketDeleteResponse\"\x00\x32\xc2\n\n\tStreamAPI\x12\x65\n\x10\x46ileUploadCreate\x12&.nodeapi.StreamFileUploadCreateRequest\x1a\'.nodeapi.StreamFileUploadCreateResponse\"\x00\x12t\n\x15\x46ileUploadChunkCreate\x12+.nodeapi.StreamFileUploadChunkCreateRequest\x1a,.nodeapi.StreamFileUploadChunkCreateResponse\"\x00\x12[\n\x0f\x46ileUploadBlock\x12\x1c.nodeapi.StreamFileBlockData\x1a&.nodeapi.StreamFileUploadBlockResponse\"\x00(\x01\x12\x65\n\x10\x46ileUploadCommit\x12&.nodeapi.StreamFileUploadCommitRequest\x1a\'.nodeapi.StreamFileUploadCommitResponse\"\x00\x12k\n\x12\x46ileDownloadCreate\x12(.nodeapi.StreamFileDownloadCreateRequest\x1a).nodeapi.StreamFileDownloadCreateResponse\"\x00\x12u\n\x17\x46ileDownloadRangeCreate\x12-.nodeapi.StreamFileDownloadRangeCreateRequest\x1a).nodeapi.StreamFileDownloadCreateResponse\"\x00\x12z\n\x17\x46ileDownloadChunkCreate\x12-.nodeapi.StreamFileDownloadChunkCreateRequest\x1a..nodeapi.StreamFileDownloadChunkCreateResponse\"\x00\x12~\n\x19\x46ileDownloadChunkCreateV2\x12-.nodeapi.StreamFileDownloadChunkCreateRequest\x1a\x30.nodeapi.StreamFileDownloadChunkCreateResponseV2\"\x00\x12^\n\x11\x46ileDownloadBlock\x12\'.nodeapi.StreamFileDownloadBlockRequest\x1a\x1c.nodeapi.StreamFileBlockData\"\x00\x30\x01\x12M\n\x08\x46ileList\x12\x1e.nodeapi.StreamFileListRequest\x1a\x1f.nodeapi.StreamFileListResponse\"\x00\x12M\n\x08\x46ileView\x12\x1e.nodeapi.StreamFileViewRequest\x1a\x1f.nodeapi.StreamFileViewResponse\"\x00\x12\x61\n\x0c\x46ileVersions\x12&.nodeapi.StreamFileListVersionsRequest\x1a\'.nodeapi.StreamFileListVersionsResponse\"
\x00\x12S\n\nFileDelete\x12 .nodeapi.StreamFileDeleteRequest\x1a!.nodeapi.StreamFileDeleteResponse\"\x00\x42\x1bZ\x19\x61kave.ai/akave/private/pbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'nodeapi_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'private.pb.nodeapi_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'Z\031akave.ai/akave/private/pb' - _globals['_BUCKETCREATEREQUEST']._serialized_start=59 - _globals['_BUCKETCREATEREQUEST']._serialized_end=94 - _globals['_BUCKETCREATERESPONSE']._serialized_start=96 - _globals['_BUCKETCREATERESPONSE']._serialized_end=180 - _globals['_BUCKETVIEWREQUEST']._serialized_start=182 - _globals['_BUCKETVIEWREQUEST']._serialized_end=222 - _globals['_BUCKETVIEWRESPONSE']._serialized_start=224 - _globals['_BUCKETVIEWRESPONSE']._serialized_end=306 - _globals['_BUCKETLISTREQUEST']._serialized_start=308 - _globals['_BUCKETLISTREQUEST']._serialized_end=327 - _globals['_BUCKETLISTRESPONSE']._serialized_start=330 - _globals['_BUCKETLISTRESPONSE']._serialized_end=475 - _globals['_BUCKETLISTRESPONSE_BUCKET']._serialized_start=405 - _globals['_BUCKETLISTRESPONSE_BUCKET']._serialized_end=475 - _globals['_BUCKETDELETEREQUEST']._serialized_start=477 - _globals['_BUCKETDELETEREQUEST']._serialized_end=512 - _globals['_BUCKETDELETERESPONSE']._serialized_start=514 - _globals['_BUCKETDELETERESPONSE']._serialized_end=536 - _globals['_FILEBLOCKDATA']._serialized_start=538 - _globals['_FILEBLOCKDATA']._serialized_end=580 - _globals['_STREAMFILEUPLOADCREATEREQUEST']._serialized_start=582 - _globals['_STREAMFILEUPLOADCREATEREQUEST']._serialized_end=653 - _globals['_STREAMFILEUPLOADCREATERESPONSE']._serialized_start=656 - _globals['_STREAMFILEUPLOADCREATERESPONSE']._serialized_end=795 - _globals['_CHUNK']._serialized_start=798 - 
_globals['_CHUNK']._serialized_end=940 - _globals['_CHUNK_BLOCK']._serialized_start=906 - _globals['_CHUNK_BLOCK']._serialized_end=940 - _globals['_STREAMFILEUPLOADCHUNKCREATEREQUEST']._serialized_start=942 - _globals['_STREAMFILEUPLOADCHUNKCREATEREQUEST']._serialized_end=1009 - _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE']._serialized_start=1012 - _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE']._serialized_end=1206 - _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE_BLOCKUPLOAD']._serialized_start=1125 - _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE_BLOCKUPLOAD']._serialized_end=1206 - _globals['_STREAMFILEBLOCKDATA']._serialized_start=1208 - _globals['_STREAMFILEBLOCKDATA']._serialized_end=1302 - _globals['_STREAMFILEUPLOADBLOCKRESPONSE']._serialized_start=1304 - _globals['_STREAMFILEUPLOADBLOCKRESPONSE']._serialized_end=1335 - _globals['_STREAMFILEUPLOADCOMMITREQUEST']._serialized_start=1337 - _globals['_STREAMFILEUPLOADCOMMITREQUEST']._serialized_end=1426 - _globals['_STREAMFILEUPLOADCOMMITRESPONSE']._serialized_start=1429 - _globals['_STREAMFILEUPLOADCOMMITRESPONSE']._serialized_end=1606 - _globals['_STREAMFILEDOWNLOADCREATEREQUEST']._serialized_start=1608 - _globals['_STREAMFILEDOWNLOADCREATEREQUEST']._serialized_end=1699 - _globals['_STREAMFILEDOWNLOADCREATERESPONSE']._serialized_start=1702 - _globals['_STREAMFILEDOWNLOADCREATERESPONSE']._serialized_end=1899 - _globals['_STREAMFILEDOWNLOADCREATERESPONSE_CHUNK']._serialized_start=1843 - _globals['_STREAMFILEDOWNLOADCREATERESPONSE_CHUNK']._serialized_end=1899 - _globals['_STREAMFILEDOWNLOADRANGECREATEREQUEST']._serialized_start=1901 - _globals['_STREAMFILEDOWNLOADRANGECREATEREQUEST']._serialized_end=2019 - _globals['_STREAMFILEDOWNLOADCHUNKCREATEREQUEST']._serialized_start=2021 - _globals['_STREAMFILEDOWNLOADCHUNKCREATEREQUEST']._serialized_end=2097 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE']._serialized_start=2100 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE']._serialized_end=2316 - 
_globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE_BLOCKDOWNLOAD']._serialized_start=2219 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE_BLOCKDOWNLOAD']._serialized_end=2316 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2']._serialized_start=2319 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2']._serialized_end=2741 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD']._serialized_start=2443 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD']._serialized_end=2741 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_AKAVE']._serialized_start=2663 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_AKAVE']._serialized_end=2709 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_FILECOIN']._serialized_start=2711 - _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_FILECOIN']._serialized_end=2741 - _globals['_STREAMFILEDOWNLOADBLOCKREQUEST']._serialized_start=2744 - _globals['_STREAMFILEDOWNLOADBLOCKREQUEST']._serialized_end=2875 - _globals['_STREAMFILELISTREQUEST']._serialized_start=2877 - _globals['_STREAMFILELISTREQUEST']._serialized_end=2921 - _globals['_FILE']._serialized_start=2924 - _globals['_FILE']._serialized_end=3114 - _globals['_STREAMFILELISTRESPONSE']._serialized_start=3116 - _globals['_STREAMFILELISTRESPONSE']._serialized_end=3170 - _globals['_STREAMFILEVIEWREQUEST']._serialized_start=3172 - _globals['_STREAMFILEVIEWREQUEST']._serialized_end=3235 - _globals['_STREAMFILEVIEWRESPONSE']._serialized_start=3238 - _globals['_STREAMFILEVIEWRESPONSE']._serialized_end=3473 - _globals['_STREAMFILEDELETEREQUEST']._serialized_start=3475 - _globals['_STREAMFILEDELETEREQUEST']._serialized_end=3540 - _globals['_STREAMFILEDELETERESPONSE']._serialized_start=3542 - _globals['_STREAMFILEDELETERESPONSE']._serialized_end=3568 - _globals['_STREAMFILELISTVERSIONSREQUEST']._serialized_start=3570 - _globals['_STREAMFILELISTVERSIONSREQUEST']._serialized_end=3641 - 
_globals['_STREAMFILELISTVERSIONSRESPONSE']._serialized_start=3643 - _globals['_STREAMFILELISTVERSIONSRESPONSE']._serialized_end=3708 - _globals['_NODEAPI']._serialized_start=3711 - _globals['_NODEAPI']._serialized_end=4024 - _globals['_STREAMAPI']._serialized_start=4027 - _globals['_STREAMAPI']._serialized_end=5373 + _globals['_BUCKETCREATEREQUEST']._serialized_start=70 + _globals['_BUCKETCREATEREQUEST']._serialized_end=105 + _globals['_BUCKETCREATERESPONSE']._serialized_start=107 + _globals['_BUCKETCREATERESPONSE']._serialized_end=191 + _globals['_BUCKETVIEWREQUEST']._serialized_start=193 + _globals['_BUCKETVIEWREQUEST']._serialized_end=233 + _globals['_BUCKETVIEWRESPONSE']._serialized_start=235 + _globals['_BUCKETVIEWRESPONSE']._serialized_end=317 + _globals['_BUCKETLISTREQUEST']._serialized_start=319 + _globals['_BUCKETLISTREQUEST']._serialized_end=338 + _globals['_BUCKETLISTRESPONSE']._serialized_start=341 + _globals['_BUCKETLISTRESPONSE']._serialized_end=486 + _globals['_BUCKETLISTRESPONSE_BUCKET']._serialized_start=416 + _globals['_BUCKETLISTRESPONSE_BUCKET']._serialized_end=486 + _globals['_BUCKETDELETEREQUEST']._serialized_start=488 + _globals['_BUCKETDELETEREQUEST']._serialized_end=523 + _globals['_BUCKETDELETERESPONSE']._serialized_start=525 + _globals['_BUCKETDELETERESPONSE']._serialized_end=547 + _globals['_FILEBLOCKDATA']._serialized_start=549 + _globals['_FILEBLOCKDATA']._serialized_end=591 + _globals['_STREAMFILEUPLOADCREATEREQUEST']._serialized_start=593 + _globals['_STREAMFILEUPLOADCREATEREQUEST']._serialized_end=664 + _globals['_STREAMFILEUPLOADCREATERESPONSE']._serialized_start=667 + _globals['_STREAMFILEUPLOADCREATERESPONSE']._serialized_end=806 + _globals['_CHUNK']._serialized_start=809 + _globals['_CHUNK']._serialized_end=951 + _globals['_CHUNK_BLOCK']._serialized_start=917 + _globals['_CHUNK_BLOCK']._serialized_end=951 + _globals['_STREAMFILEUPLOADCHUNKCREATEREQUEST']._serialized_start=953 + 
_globals['_STREAMFILEUPLOADCHUNKCREATEREQUEST']._serialized_end=1020 + _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE']._serialized_start=1023 + _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE']._serialized_end=1217 + _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE_BLOCKUPLOAD']._serialized_start=1136 + _globals['_STREAMFILEUPLOADCHUNKCREATERESPONSE_BLOCKUPLOAD']._serialized_end=1217 + _globals['_STREAMFILEBLOCKDATA']._serialized_start=1219 + _globals['_STREAMFILEBLOCKDATA']._serialized_end=1313 + _globals['_STREAMFILEUPLOADBLOCKRESPONSE']._serialized_start=1315 + _globals['_STREAMFILEUPLOADBLOCKRESPONSE']._serialized_end=1346 + _globals['_STREAMFILEUPLOADCOMMITREQUEST']._serialized_start=1348 + _globals['_STREAMFILEUPLOADCOMMITREQUEST']._serialized_end=1437 + _globals['_STREAMFILEUPLOADCOMMITRESPONSE']._serialized_start=1440 + _globals['_STREAMFILEUPLOADCOMMITRESPONSE']._serialized_end=1617 + _globals['_STREAMFILEDOWNLOADCREATEREQUEST']._serialized_start=1619 + _globals['_STREAMFILEDOWNLOADCREATEREQUEST']._serialized_end=1710 + _globals['_STREAMFILEDOWNLOADCREATERESPONSE']._serialized_start=1713 + _globals['_STREAMFILEDOWNLOADCREATERESPONSE']._serialized_end=1910 + _globals['_STREAMFILEDOWNLOADCREATERESPONSE_CHUNK']._serialized_start=1854 + _globals['_STREAMFILEDOWNLOADCREATERESPONSE_CHUNK']._serialized_end=1910 + _globals['_STREAMFILEDOWNLOADRANGECREATEREQUEST']._serialized_start=1912 + _globals['_STREAMFILEDOWNLOADRANGECREATEREQUEST']._serialized_end=2030 + _globals['_STREAMFILEDOWNLOADCHUNKCREATEREQUEST']._serialized_start=2032 + _globals['_STREAMFILEDOWNLOADCHUNKCREATEREQUEST']._serialized_end=2108 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE']._serialized_start=2111 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE']._serialized_end=2327 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE_BLOCKDOWNLOAD']._serialized_start=2230 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSE_BLOCKDOWNLOAD']._serialized_end=2327 + 
_globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2']._serialized_start=2330 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2']._serialized_end=2752 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD']._serialized_start=2454 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD']._serialized_end=2752 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_AKAVE']._serialized_start=2674 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_AKAVE']._serialized_end=2720 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_FILECOIN']._serialized_start=2722 + _globals['_STREAMFILEDOWNLOADCHUNKCREATERESPONSEV2_BLOCKDOWNLOAD_FILECOIN']._serialized_end=2752 + _globals['_STREAMFILEDOWNLOADBLOCKREQUEST']._serialized_start=2755 + _globals['_STREAMFILEDOWNLOADBLOCKREQUEST']._serialized_end=2886 + _globals['_STREAMFILELISTREQUEST']._serialized_start=2888 + _globals['_STREAMFILELISTREQUEST']._serialized_end=2932 + _globals['_FILE']._serialized_start=2935 + _globals['_FILE']._serialized_end=3125 + _globals['_STREAMFILELISTRESPONSE']._serialized_start=3127 + _globals['_STREAMFILELISTRESPONSE']._serialized_end=3181 + _globals['_STREAMFILEVIEWREQUEST']._serialized_start=3183 + _globals['_STREAMFILEVIEWREQUEST']._serialized_end=3246 + _globals['_STREAMFILEVIEWRESPONSE']._serialized_start=3249 + _globals['_STREAMFILEVIEWRESPONSE']._serialized_end=3484 + _globals['_STREAMFILEDELETEREQUEST']._serialized_start=3486 + _globals['_STREAMFILEDELETEREQUEST']._serialized_end=3551 + _globals['_STREAMFILEDELETERESPONSE']._serialized_start=3553 + _globals['_STREAMFILEDELETERESPONSE']._serialized_end=3579 + _globals['_STREAMFILELISTVERSIONSREQUEST']._serialized_start=3581 + _globals['_STREAMFILELISTVERSIONSREQUEST']._serialized_end=3652 + _globals['_STREAMFILELISTVERSIONSRESPONSE']._serialized_start=3654 + _globals['_STREAMFILELISTVERSIONSRESPONSE']._serialized_end=3719 + _globals['_NODEAPI']._serialized_start=3722 + 
_globals['_NODEAPI']._serialized_end=4035 + _globals['_STREAMAPI']._serialized_start=4038 + _globals['_STREAMAPI']._serialized_end=5384 # @@protoc_insertion_point(module_scope) diff --git a/private/pb/nodeapi_pb2.pyi b/private/pb/nodeapi_pb2.pyi new file mode 100644 index 0000000..d46b325 --- /dev/null +++ b/private/pb/nodeapi_pb2.pyi @@ -0,0 +1,838 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import typing + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class BucketCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +global___BucketCreateRequest = BucketCreateRequest + +@typing.final +class BucketCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + name: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "name", b"name"]) -> None: ... 
+ +global___BucketCreateResponse = BucketCreateResponse + +@typing.final +class BucketViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name"]) -> None: ... + +global___BucketViewRequest = BucketViewRequest + +@typing.final +class BucketViewResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + name: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "name", b"name"]) -> None: ... + +global___BucketViewResponse = BucketViewResponse + +@typing.final +class BucketListRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___BucketListRequest = BucketListRequest + +@typing.final +class BucketListResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Bucket(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + name: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "name", b"name"]) -> None: ... + + BUCKETS_FIELD_NUMBER: builtins.int + @property + def buckets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BucketListResponse.Bucket]: ... + def __init__( + self, + *, + buckets: collections.abc.Iterable[global___BucketListResponse.Bucket] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["buckets", b"buckets"]) -> None: ... + +global___BucketListResponse = BucketListResponse + +@typing.final +class BucketDeleteRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +global___BucketDeleteRequest = BucketDeleteRequest + +@typing.final +class BucketDeleteResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___BucketDeleteResponse = BucketDeleteResponse + +@typing.final +class FileBlockData(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_FIELD_NUMBER: builtins.int + CID_FIELD_NUMBER: builtins.int + data: builtins.bytes + cid: builtins.str + def __init__( + self, + *, + data: builtins.bytes = ..., + cid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "data", b"data"]) -> None: ... 
+ +global___FileBlockData = FileBlockData + +@typing.final +class StreamFileUploadCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "file_name", b"file_name"]) -> None: ... + +global___StreamFileUploadCreateRequest = StreamFileUploadCreateRequest + +@typing.final +class StreamFileUploadCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + STREAM_ID_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + stream_id: builtins.str + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + stream_id: builtins.str = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "created_at", b"created_at", "file_name", b"file_name", "stream_id", b"stream_id"]) -> None: ... 
+ +global___StreamFileUploadCreateResponse = StreamFileUploadCreateResponse + +@typing.final +class Chunk(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Block(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + cid: builtins.str + size: builtins.int + def __init__( + self, + *, + cid: builtins.str = ..., + size: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "size", b"size"]) -> None: ... + + STREAM_ID_FIELD_NUMBER: builtins.int + CID_FIELD_NUMBER: builtins.int + INDEX_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + BLOCKS_FIELD_NUMBER: builtins.int + stream_id: builtins.str + cid: builtins.str + index: builtins.int + size: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Chunk.Block]: ... + def __init__( + self, + *, + stream_id: builtins.str = ..., + cid: builtins.str = ..., + index: builtins.int = ..., + size: builtins.int = ..., + blocks: collections.abc.Iterable[global___Chunk.Block] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks", "cid", b"cid", "index", b"index", "size", b"size", "stream_id", b"stream_id"]) -> None: ... + +global___Chunk = Chunk + +@typing.final +class StreamFileUploadChunkCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CHUNK_FIELD_NUMBER: builtins.int + @property + def chunk(self) -> global___Chunk: ... + def __init__( + self, + *, + chunk: global___Chunk | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["chunk", b"chunk"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["chunk", b"chunk"]) -> None: ... 
+ +global___StreamFileUploadChunkCreateRequest = StreamFileUploadChunkCreateRequest + +@typing.final +class StreamFileUploadChunkCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BlockUpload(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + NODE_ADDRESS_FIELD_NUMBER: builtins.int + NODE_ID_FIELD_NUMBER: builtins.int + PERMIT_FIELD_NUMBER: builtins.int + cid: builtins.str + node_address: builtins.str + node_id: builtins.str + permit: builtins.str + def __init__( + self, + *, + cid: builtins.str = ..., + node_address: builtins.str = ..., + node_id: builtins.str = ..., + permit: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "node_address", b"node_address", "node_id", b"node_id", "permit", b"permit"]) -> None: ... + + BLOCKS_FIELD_NUMBER: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StreamFileUploadChunkCreateResponse.BlockUpload]: ... + def __init__( + self, + *, + blocks: collections.abc.Iterable[global___StreamFileUploadChunkCreateResponse.BlockUpload] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks"]) -> None: ... 
+ +global___StreamFileUploadChunkCreateResponse = StreamFileUploadChunkCreateResponse + +@typing.final +class StreamFileBlockData(google.protobuf.message.Message): + """TODO: do not reuse this message for upload and download, create seprate messages""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_FIELD_NUMBER: builtins.int + CID_FIELD_NUMBER: builtins.int + INDEX_FIELD_NUMBER: builtins.int + CHUNK_FIELD_NUMBER: builtins.int + data: builtins.bytes + cid: builtins.str + """only present in first msg of request stream""" + index: builtins.int + """only present in first msg of request stream""" + @property + def chunk(self) -> global___Chunk: + """only present in first msg of request stream""" + + def __init__( + self, + *, + data: builtins.bytes = ..., + cid: builtins.str = ..., + index: builtins.int = ..., + chunk: global___Chunk | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["chunk", b"chunk"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["chunk", b"chunk", "cid", b"cid", "data", b"data", "index", b"index"]) -> None: ... + +global___StreamFileBlockData = StreamFileBlockData + +@typing.final +class StreamFileUploadBlockResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___StreamFileUploadBlockResponse = StreamFileUploadBlockResponse + +@typing.final +class StreamFileUploadCommitRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_ID_FIELD_NUMBER: builtins.int + ROOT_CID_FIELD_NUMBER: builtins.int + CHUNK_COUNT_FIELD_NUMBER: builtins.int + stream_id: builtins.str + root_cid: builtins.str + chunk_count: builtins.int + def __init__( + self, + *, + stream_id: builtins.str = ..., + root_cid: builtins.str = ..., + chunk_count: builtins.int = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["chunk_count", b"chunk_count", "root_cid", b"root_cid", "stream_id", b"stream_id"]) -> None: ... + +global___StreamFileUploadCommitRequest = StreamFileUploadCommitRequest + +@typing.final +class StreamFileUploadCommitResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_ID_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + BUCKET_NAME_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + COMMITTED_AT_FIELD_NUMBER: builtins.int + stream_id: builtins.str + file_name: builtins.str + bucket_name: builtins.str + encoded_size: builtins.int + size: builtins.int + @property + def committed_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + stream_id: builtins.str = ..., + file_name: builtins.str = ..., + bucket_name: builtins.str = ..., + encoded_size: builtins.int = ..., + size: builtins.int = ..., + committed_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["committed_at", b"committed_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "committed_at", b"committed_at", "encoded_size", b"encoded_size", "file_name", b"file_name", "size", b"size", "stream_id", b"stream_id"]) -> None: ... 
+ +global___StreamFileUploadCommitResponse = StreamFileUploadCommitResponse + +@typing.final +class StreamFileDownloadCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + ROOT_CID_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + root_cid: builtins.str + """optional""" + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + root_cid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "file_name", b"file_name", "root_cid", b"root_cid"]) -> None: ... + +global___StreamFileDownloadCreateRequest = StreamFileDownloadCreateRequest + +@typing.final +class StreamFileDownloadCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Chunk(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + cid: builtins.str + encoded_size: builtins.int + size: builtins.int + def __init__( + self, + *, + cid: builtins.str = ..., + encoded_size: builtins.int = ..., + size: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "encoded_size", b"encoded_size", "size", b"size"]) -> None: ... + + BUCKET_NAME_FIELD_NUMBER: builtins.int + STREAM_ID_FIELD_NUMBER: builtins.int + CHUNKS_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + stream_id: builtins.str + @property + def chunks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StreamFileDownloadCreateResponse.Chunk]: ... 
+ def __init__( + self, + *, + bucket_name: builtins.str = ..., + stream_id: builtins.str = ..., + chunks: collections.abc.Iterable[global___StreamFileDownloadCreateResponse.Chunk] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "chunks", b"chunks", "stream_id", b"stream_id"]) -> None: ... + +global___StreamFileDownloadCreateResponse = StreamFileDownloadCreateResponse + +@typing.final +class StreamFileDownloadRangeCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + START_INDEX_FIELD_NUMBER: builtins.int + END_INDEX_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + start_index: builtins.int + end_index: builtins.int + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + start_index: builtins.int = ..., + end_index: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "end_index", b"end_index", "file_name", b"file_name", "start_index", b"start_index"]) -> None: ... + +global___StreamFileDownloadRangeCreateRequest = StreamFileDownloadRangeCreateRequest + +@typing.final +class StreamFileDownloadChunkCreateRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_ID_FIELD_NUMBER: builtins.int + CHUNK_CID_FIELD_NUMBER: builtins.int + stream_id: builtins.str + chunk_cid: builtins.str + def __init__( + self, + *, + stream_id: builtins.str = ..., + chunk_cid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["chunk_cid", b"chunk_cid", "stream_id", b"stream_id"]) -> None: ... 
+ +global___StreamFileDownloadChunkCreateRequest = StreamFileDownloadChunkCreateRequest + +@typing.final +class StreamFileDownloadChunkCreateResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BlockDownload(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CID_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + NODE_ADDRESS_FIELD_NUMBER: builtins.int + NODE_ID_FIELD_NUMBER: builtins.int + PERMIT_FIELD_NUMBER: builtins.int + cid: builtins.str + size: builtins.int + node_address: builtins.str + node_id: builtins.str + permit: builtins.str + def __init__( + self, + *, + cid: builtins.str = ..., + size: builtins.int = ..., + node_address: builtins.str = ..., + node_id: builtins.str = ..., + permit: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cid", b"cid", "node_address", b"node_address", "node_id", b"node_id", "permit", b"permit", "size", b"size"]) -> None: ... + + BLOCKS_FIELD_NUMBER: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StreamFileDownloadChunkCreateResponse.BlockDownload]: ... + def __init__( + self, + *, + blocks: collections.abc.Iterable[global___StreamFileDownloadChunkCreateResponse.BlockDownload] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks"]) -> None: ... 
+ +global___StreamFileDownloadChunkCreateResponse = StreamFileDownloadChunkCreateResponse + +@typing.final +class StreamFileDownloadChunkCreateResponseV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BlockDownload(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Akave(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NODE_ID_FIELD_NUMBER: builtins.int + NODE_ADDRESS_FIELD_NUMBER: builtins.int + node_id: builtins.str + node_address: builtins.str + def __init__( + self, + *, + node_id: builtins.str = ..., + node_address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["node_address", b"node_address", "node_id", b"node_id"]) -> None: ... + + @typing.final + class Filecoin(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SP_ADDRESS_FIELD_NUMBER: builtins.int + sp_address: builtins.str + def __init__( + self, + *, + sp_address: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["sp_address", b"sp_address"]) -> None: ... + + CID_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + AKAVE_FIELD_NUMBER: builtins.int + FILECOIN_FIELD_NUMBER: builtins.int + cid: builtins.str + size: builtins.int + @property + def akave(self) -> global___StreamFileDownloadChunkCreateResponseV2.BlockDownload.Akave: ... + @property + def filecoin(self) -> global___StreamFileDownloadChunkCreateResponseV2.BlockDownload.Filecoin: ... + def __init__( + self, + *, + cid: builtins.str = ..., + size: builtins.int = ..., + akave: global___StreamFileDownloadChunkCreateResponseV2.BlockDownload.Akave | None = ..., + filecoin: global___StreamFileDownloadChunkCreateResponseV2.BlockDownload.Filecoin | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing.Literal["akave", b"akave", "filecoin", b"filecoin"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["akave", b"akave", "cid", b"cid", "filecoin", b"filecoin", "size", b"size"]) -> None: ... + + BLOCKS_FIELD_NUMBER: builtins.int + @property + def blocks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StreamFileDownloadChunkCreateResponseV2.BlockDownload]: ... + def __init__( + self, + *, + blocks: collections.abc.Iterable[global___StreamFileDownloadChunkCreateResponseV2.BlockDownload] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["blocks", b"blocks"]) -> None: ... + +global___StreamFileDownloadChunkCreateResponseV2 = StreamFileDownloadChunkCreateResponseV2 + +@typing.final +class StreamFileDownloadBlockRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_ID_FIELD_NUMBER: builtins.int + CHUNK_CID_FIELD_NUMBER: builtins.int + CHUNK_INDEX_FIELD_NUMBER: builtins.int + BLOCK_CID_FIELD_NUMBER: builtins.int + BLOCK_INDEX_FIELD_NUMBER: builtins.int + stream_id: builtins.str + chunk_cid: builtins.str + chunk_index: builtins.int + block_cid: builtins.str + block_index: builtins.int + def __init__( + self, + *, + stream_id: builtins.str = ..., + chunk_cid: builtins.str = ..., + chunk_index: builtins.int = ..., + block_cid: builtins.str = ..., + block_index: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["block_cid", b"block_cid", "block_index", b"block_index", "chunk_cid", b"chunk_cid", "chunk_index", b"chunk_index", "stream_id", b"stream_id"]) -> None: ... 
+ +global___StreamFileDownloadBlockRequest = StreamFileDownloadBlockRequest + +@typing.final +class StreamFileListRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name"]) -> None: ... + +global___StreamFileListRequest = StreamFileListRequest + +@typing.final +class File(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_ID_FIELD_NUMBER: builtins.int + ROOT_CID_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + COMMITED_AT_FIELD_NUMBER: builtins.int + stream_id: builtins.str + root_cid: builtins.str + name: builtins.str + encoded_size: builtins.int + size: builtins.int + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def commited_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + stream_id: builtins.str = ..., + root_cid: builtins.str = ..., + name: builtins.str = ..., + encoded_size: builtins.int = ..., + size: builtins.int = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + commited_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["commited_at", b"commited_at", "created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["commited_at", b"commited_at", "created_at", b"created_at", "encoded_size", b"encoded_size", "name", b"name", "root_cid", b"root_cid", "size", b"size", "stream_id", b"stream_id"]) -> None: ... 
+ +global___File = File + +@typing.final +class StreamFileListResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILES_FIELD_NUMBER: builtins.int + @property + def files(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___File]: ... + def __init__( + self, + *, + files: collections.abc.Iterable[global___File] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["files", b"files"]) -> None: ... + +global___StreamFileListResponse = StreamFileListResponse + +@typing.final +class StreamFileViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "file_name", b"file_name"]) -> None: ... + +global___StreamFileViewRequest = StreamFileViewRequest + +@typing.final +class StreamFileViewResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + STREAM_ID_FIELD_NUMBER: builtins.int + ROOT_CID_FIELD_NUMBER: builtins.int + ENCODED_SIZE_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + CREATED_AT_FIELD_NUMBER: builtins.int + COMMITTED_AT_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + stream_id: builtins.str + root_cid: builtins.str + encoded_size: builtins.int + size: builtins.int + @property + def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def committed_at(self) -> google.protobuf.timestamp_pb2.Timestamp: ... 
+ def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + stream_id: builtins.str = ..., + root_cid: builtins.str = ..., + encoded_size: builtins.int = ..., + size: builtins.int = ..., + created_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + committed_at: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["committed_at", b"committed_at", "created_at", b"created_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "committed_at", b"committed_at", "created_at", b"created_at", "encoded_size", b"encoded_size", "file_name", b"file_name", "root_cid", b"root_cid", "size", b"size", "stream_id", b"stream_id"]) -> None: ... + +global___StreamFileViewResponse = StreamFileViewResponse + +@typing.final +class StreamFileDeleteRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "file_name", b"file_name"]) -> None: ... + +global___StreamFileDeleteRequest = StreamFileDeleteRequest + +@typing.final +class StreamFileDeleteResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +global___StreamFileDeleteResponse = StreamFileDeleteResponse + +@typing.final +class StreamFileListVersionsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUCKET_NAME_FIELD_NUMBER: builtins.int + FILE_NAME_FIELD_NUMBER: builtins.int + bucket_name: builtins.str + file_name: builtins.str + def __init__( + self, + *, + bucket_name: builtins.str = ..., + file_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "file_name", b"file_name"]) -> None: ... + +global___StreamFileListVersionsRequest = StreamFileListVersionsRequest + +@typing.final +class StreamFileListVersionsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VERSIONS_FIELD_NUMBER: builtins.int + @property + def versions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___File]: ... + def __init__( + self, + *, + versions: collections.abc.Iterable[global___File] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["versions", b"versions"]) -> None: ... + +global___StreamFileListVersionsResponse = StreamFileListVersionsResponse diff --git a/private/pb/nodeapi_pb2_grpc.py b/private/pb/nodeapi_pb2_grpc.py index d5bfc86..f34019e 100644 --- a/private/pb/nodeapi_pb2_grpc.py +++ b/private/pb/nodeapi_pb2_grpc.py @@ -3,7 +3,7 @@ import grpc import warnings -from private.pb import nodeapi_pb2 as nodeapi__pb2 +from private.pb import nodeapi_pb2 as private_dot_pb_dot_nodeapi__pb2 GRPC_GENERATED_VERSION = '1.71.0' GRPC_VERSION = grpc.__version__ @@ -18,7 +18,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in nodeapi_pb2_grpc.py depends on' + + f' but the generated code in private/pb/nodeapi_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' 
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' @@ -36,23 +36,23 @@ def __init__(self, channel): """ self.BucketCreate = channel.unary_unary( '/nodeapi.NodeAPI/BucketCreate', - request_serializer=nodeapi__pb2.BucketCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.BucketCreateResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.BucketCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketCreateResponse.FromString, _registered_method=True) self.BucketView = channel.unary_unary( '/nodeapi.NodeAPI/BucketView', - request_serializer=nodeapi__pb2.BucketViewRequest.SerializeToString, - response_deserializer=nodeapi__pb2.BucketViewResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.BucketViewRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketViewResponse.FromString, _registered_method=True) self.BucketList = channel.unary_unary( '/nodeapi.NodeAPI/BucketList', - request_serializer=nodeapi__pb2.BucketListRequest.SerializeToString, - response_deserializer=nodeapi__pb2.BucketListResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.BucketListRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketListResponse.FromString, _registered_method=True) self.BucketDelete = channel.unary_unary( '/nodeapi.NodeAPI/BucketDelete', - request_serializer=nodeapi__pb2.BucketDeleteRequest.SerializeToString, - response_deserializer=nodeapi__pb2.BucketDeleteResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.BucketDeleteRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketDeleteResponse.FromString, _registered_method=True) @@ -89,23 +89,23 @@ def add_NodeAPIServicer_to_server(servicer, server): rpc_method_handlers = { 'BucketCreate': 
grpc.unary_unary_rpc_method_handler( servicer.BucketCreate, - request_deserializer=nodeapi__pb2.BucketCreateRequest.FromString, - response_serializer=nodeapi__pb2.BucketCreateResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketCreateRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.BucketCreateResponse.SerializeToString, ), 'BucketView': grpc.unary_unary_rpc_method_handler( servicer.BucketView, - request_deserializer=nodeapi__pb2.BucketViewRequest.FromString, - response_serializer=nodeapi__pb2.BucketViewResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketViewRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.BucketViewResponse.SerializeToString, ), 'BucketList': grpc.unary_unary_rpc_method_handler( servicer.BucketList, - request_deserializer=nodeapi__pb2.BucketListRequest.FromString, - response_serializer=nodeapi__pb2.BucketListResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketListRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.BucketListResponse.SerializeToString, ), 'BucketDelete': grpc.unary_unary_rpc_method_handler( servicer.BucketDelete, - request_deserializer=nodeapi__pb2.BucketDeleteRequest.FromString, - response_serializer=nodeapi__pb2.BucketDeleteResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.BucketDeleteRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.BucketDeleteResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( @@ -133,8 +133,8 @@ def BucketCreate(request, request, target, '/nodeapi.NodeAPI/BucketCreate', - nodeapi__pb2.BucketCreateRequest.SerializeToString, - nodeapi__pb2.BucketCreateResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.BucketCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.BucketCreateResponse.FromString, options, channel_credentials, 
insecure, @@ -160,8 +160,8 @@ def BucketView(request, request, target, '/nodeapi.NodeAPI/BucketView', - nodeapi__pb2.BucketViewRequest.SerializeToString, - nodeapi__pb2.BucketViewResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.BucketViewRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.BucketViewResponse.FromString, options, channel_credentials, insecure, @@ -187,8 +187,8 @@ def BucketList(request, request, target, '/nodeapi.NodeAPI/BucketList', - nodeapi__pb2.BucketListRequest.SerializeToString, - nodeapi__pb2.BucketListResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.BucketListRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.BucketListResponse.FromString, options, channel_credentials, insecure, @@ -214,8 +214,8 @@ def BucketDelete(request, request, target, '/nodeapi.NodeAPI/BucketDelete', - nodeapi__pb2.BucketDeleteRequest.SerializeToString, - nodeapi__pb2.BucketDeleteResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.BucketDeleteRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.BucketDeleteResponse.FromString, options, channel_credentials, insecure, @@ -239,68 +239,68 @@ def __init__(self, channel): """ self.FileUploadCreate = channel.unary_unary( '/nodeapi.StreamAPI/FileUploadCreate', - request_serializer=nodeapi__pb2.StreamFileUploadCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileUploadCreateResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCreateResponse.FromString, _registered_method=True) self.FileUploadChunkCreate = channel.unary_unary( '/nodeapi.StreamAPI/FileUploadChunkCreate', - request_serializer=nodeapi__pb2.StreamFileUploadChunkCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileUploadChunkCreateResponse.FromString, + 
request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadChunkCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadChunkCreateResponse.FromString, _registered_method=True) self.FileUploadBlock = channel.stream_unary( '/nodeapi.StreamAPI/FileUploadBlock', - request_serializer=nodeapi__pb2.StreamFileBlockData.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileUploadBlockResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileBlockData.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadBlockResponse.FromString, _registered_method=True) self.FileUploadCommit = channel.unary_unary( '/nodeapi.StreamAPI/FileUploadCommit', - request_serializer=nodeapi__pb2.StreamFileUploadCommitRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileUploadCommitResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCommitRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCommitResponse.FromString, _registered_method=True) self.FileDownloadCreate = channel.unary_unary( '/nodeapi.StreamAPI/FileDownloadCreate', - request_serializer=nodeapi__pb2.StreamFileDownloadCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, _registered_method=True) self.FileDownloadRangeCreate = channel.unary_unary( '/nodeapi.StreamAPI/FileDownloadRangeCreate', - request_serializer=nodeapi__pb2.StreamFileDownloadRangeCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, + 
request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadRangeCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, _registered_method=True) self.FileDownloadChunkCreate = channel.unary_unary( '/nodeapi.StreamAPI/FileDownloadChunkCreate', - request_serializer=nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileDownloadChunkCreateResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateResponse.FromString, _registered_method=True) self.FileDownloadChunkCreateV2 = channel.unary_unary( '/nodeapi.StreamAPI/FileDownloadChunkCreateV2', - request_serializer=nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileDownloadChunkCreateResponseV2.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateResponseV2.FromString, _registered_method=True) self.FileDownloadBlock = channel.unary_stream( '/nodeapi.StreamAPI/FileDownloadBlock', - request_serializer=nodeapi__pb2.StreamFileDownloadBlockRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileBlockData.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadBlockRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileBlockData.FromString, _registered_method=True) self.FileList = channel.unary_unary( '/nodeapi.StreamAPI/FileList', - request_serializer=nodeapi__pb2.StreamFileListRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileListResponse.FromString, + 
request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListResponse.FromString, _registered_method=True) self.FileView = channel.unary_unary( '/nodeapi.StreamAPI/FileView', - request_serializer=nodeapi__pb2.StreamFileViewRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileViewResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileViewRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileViewResponse.FromString, _registered_method=True) self.FileVersions = channel.unary_unary( '/nodeapi.StreamAPI/FileVersions', - request_serializer=nodeapi__pb2.StreamFileListVersionsRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileListVersionsResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListVersionsRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListVersionsResponse.FromString, _registered_method=True) self.FileDelete = channel.unary_unary( '/nodeapi.StreamAPI/FileDelete', - request_serializer=nodeapi__pb2.StreamFileDeleteRequest.SerializeToString, - response_deserializer=nodeapi__pb2.StreamFileDeleteResponse.FromString, + request_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDeleteRequest.SerializeToString, + response_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDeleteResponse.FromString, _registered_method=True) @@ -391,68 +391,68 @@ def add_StreamAPIServicer_to_server(servicer, server): rpc_method_handlers = { 'FileUploadCreate': grpc.unary_unary_rpc_method_handler( servicer.FileUploadCreate, - request_deserializer=nodeapi__pb2.StreamFileUploadCreateRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileUploadCreateResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCreateRequest.FromString, + 
response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCreateResponse.SerializeToString, ), 'FileUploadChunkCreate': grpc.unary_unary_rpc_method_handler( servicer.FileUploadChunkCreate, - request_deserializer=nodeapi__pb2.StreamFileUploadChunkCreateRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileUploadChunkCreateResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadChunkCreateRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadChunkCreateResponse.SerializeToString, ), 'FileUploadBlock': grpc.stream_unary_rpc_method_handler( servicer.FileUploadBlock, - request_deserializer=nodeapi__pb2.StreamFileBlockData.FromString, - response_serializer=nodeapi__pb2.StreamFileUploadBlockResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileBlockData.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadBlockResponse.SerializeToString, ), 'FileUploadCommit': grpc.unary_unary_rpc_method_handler( servicer.FileUploadCommit, - request_deserializer=nodeapi__pb2.StreamFileUploadCommitRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileUploadCommitResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCommitRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCommitResponse.SerializeToString, ), 'FileDownloadCreate': grpc.unary_unary_rpc_method_handler( servicer.FileDownloadCreate, - request_deserializer=nodeapi__pb2.StreamFileDownloadCreateRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileDownloadCreateResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateResponse.SerializeToString, ), 'FileDownloadRangeCreate': grpc.unary_unary_rpc_method_handler( 
servicer.FileDownloadRangeCreate, - request_deserializer=nodeapi__pb2.StreamFileDownloadRangeCreateRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileDownloadCreateResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadRangeCreateRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateResponse.SerializeToString, ), 'FileDownloadChunkCreate': grpc.unary_unary_rpc_method_handler( servicer.FileDownloadChunkCreate, - request_deserializer=nodeapi__pb2.StreamFileDownloadChunkCreateRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileDownloadChunkCreateResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateResponse.SerializeToString, ), 'FileDownloadChunkCreateV2': grpc.unary_unary_rpc_method_handler( servicer.FileDownloadChunkCreateV2, - request_deserializer=nodeapi__pb2.StreamFileDownloadChunkCreateRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileDownloadChunkCreateResponseV2.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateResponseV2.SerializeToString, ), 'FileDownloadBlock': grpc.unary_stream_rpc_method_handler( servicer.FileDownloadBlock, - request_deserializer=nodeapi__pb2.StreamFileDownloadBlockRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileBlockData.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadBlockRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileBlockData.SerializeToString, ), 'FileList': grpc.unary_unary_rpc_method_handler( servicer.FileList, - request_deserializer=nodeapi__pb2.StreamFileListRequest.FromString, - 
response_serializer=nodeapi__pb2.StreamFileListResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListResponse.SerializeToString, ), 'FileView': grpc.unary_unary_rpc_method_handler( servicer.FileView, - request_deserializer=nodeapi__pb2.StreamFileViewRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileViewResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileViewRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileViewResponse.SerializeToString, ), 'FileVersions': grpc.unary_unary_rpc_method_handler( servicer.FileVersions, - request_deserializer=nodeapi__pb2.StreamFileListVersionsRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileListVersionsResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListVersionsRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileListVersionsResponse.SerializeToString, ), 'FileDelete': grpc.unary_unary_rpc_method_handler( servicer.FileDelete, - request_deserializer=nodeapi__pb2.StreamFileDeleteRequest.FromString, - response_serializer=nodeapi__pb2.StreamFileDeleteResponse.SerializeToString, + request_deserializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDeleteRequest.FromString, + response_serializer=private_dot_pb_dot_nodeapi__pb2.StreamFileDeleteResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( @@ -481,8 +481,8 @@ def FileUploadCreate(request, request, target, '/nodeapi.StreamAPI/FileUploadCreate', - nodeapi__pb2.StreamFileUploadCreateRequest.SerializeToString, - nodeapi__pb2.StreamFileUploadCreateResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCreateResponse.FromString, options, channel_credentials, 
insecure, @@ -508,8 +508,8 @@ def FileUploadChunkCreate(request, request, target, '/nodeapi.StreamAPI/FileUploadChunkCreate', - nodeapi__pb2.StreamFileUploadChunkCreateRequest.SerializeToString, - nodeapi__pb2.StreamFileUploadChunkCreateResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadChunkCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadChunkCreateResponse.FromString, options, channel_credentials, insecure, @@ -535,8 +535,8 @@ def FileUploadBlock(request_iterator, request_iterator, target, '/nodeapi.StreamAPI/FileUploadBlock', - nodeapi__pb2.StreamFileBlockData.SerializeToString, - nodeapi__pb2.StreamFileUploadBlockResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileBlockData.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadBlockResponse.FromString, options, channel_credentials, insecure, @@ -562,8 +562,8 @@ def FileUploadCommit(request, request, target, '/nodeapi.StreamAPI/FileUploadCommit', - nodeapi__pb2.StreamFileUploadCommitRequest.SerializeToString, - nodeapi__pb2.StreamFileUploadCommitResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCommitRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileUploadCommitResponse.FromString, options, channel_credentials, insecure, @@ -589,8 +589,8 @@ def FileDownloadCreate(request, request, target, '/nodeapi.StreamAPI/FileDownloadCreate', - nodeapi__pb2.StreamFileDownloadCreateRequest.SerializeToString, - nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, options, channel_credentials, insecure, @@ -616,8 +616,8 @@ def FileDownloadRangeCreate(request, request, target, '/nodeapi.StreamAPI/FileDownloadRangeCreate', - nodeapi__pb2.StreamFileDownloadRangeCreateRequest.SerializeToString, - 
nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadRangeCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadCreateResponse.FromString, options, channel_credentials, insecure, @@ -643,8 +643,8 @@ def FileDownloadChunkCreate(request, request, target, '/nodeapi.StreamAPI/FileDownloadChunkCreate', - nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, - nodeapi__pb2.StreamFileDownloadChunkCreateResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateResponse.FromString, options, channel_credentials, insecure, @@ -670,8 +670,8 @@ def FileDownloadChunkCreateV2(request, request, target, '/nodeapi.StreamAPI/FileDownloadChunkCreateV2', - nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, - nodeapi__pb2.StreamFileDownloadChunkCreateResponseV2.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadChunkCreateResponseV2.FromString, options, channel_credentials, insecure, @@ -697,8 +697,8 @@ def FileDownloadBlock(request, request, target, '/nodeapi.StreamAPI/FileDownloadBlock', - nodeapi__pb2.StreamFileDownloadBlockRequest.SerializeToString, - nodeapi__pb2.StreamFileBlockData.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDownloadBlockRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileBlockData.FromString, options, channel_credentials, insecure, @@ -724,8 +724,8 @@ def FileList(request, request, target, '/nodeapi.StreamAPI/FileList', - nodeapi__pb2.StreamFileListRequest.SerializeToString, - nodeapi__pb2.StreamFileListResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileListRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileListResponse.FromString, options, channel_credentials, 
insecure, @@ -751,8 +751,8 @@ def FileView(request, request, target, '/nodeapi.StreamAPI/FileView', - nodeapi__pb2.StreamFileViewRequest.SerializeToString, - nodeapi__pb2.StreamFileViewResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileViewRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileViewResponse.FromString, options, channel_credentials, insecure, @@ -778,8 +778,8 @@ def FileVersions(request, request, target, '/nodeapi.StreamAPI/FileVersions', - nodeapi__pb2.StreamFileListVersionsRequest.SerializeToString, - nodeapi__pb2.StreamFileListVersionsResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileListVersionsRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileListVersionsResponse.FromString, options, channel_credentials, insecure, @@ -805,8 +805,8 @@ def FileDelete(request, request, target, '/nodeapi.StreamAPI/FileDelete', - nodeapi__pb2.StreamFileDeleteRequest.SerializeToString, - nodeapi__pb2.StreamFileDeleteResponse.FromString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDeleteRequest.SerializeToString, + private_dot_pb_dot_nodeapi__pb2.StreamFileDeleteResponse.FromString, options, channel_credentials, insecure, diff --git a/private/spclient/spclient.py b/private/spclient/spclient.py index 543af42..ffe34c4 100644 --- a/private/spclient/spclient.py +++ b/private/spclient/spclient.py @@ -8,13 +8,17 @@ class SPClient: """Client for communication with Filecoin Storage Provider (SP).""" - def __init__(self): + def __init__(self) -> None: + """ + Initializes the SPClient with a session for HTTP requests. + Sets up retry logic for handling transient errors. 
+ """ self.session = requests.Session() retries = Retry(total=3, backoff_factor=0.3, status_forcelist=[500, 502, 503, 504]) self.session.mount("http://", HTTPAdapter(max_retries=retries)) self.session.mount("https://", HTTPAdapter(max_retries=retries)) - def close(self): + def close(self) -> None: """Closes the HTTP session.""" self.session.close() @@ -43,6 +47,6 @@ def fetch_block(self, sp_base_url: str, cid_str: str) -> bytes: block_cid = "bafybeihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku" # Example CID base_url = "https://filecoin-provider.com" block_data = sp_client.fetch_block(base_url, block_cid) - print(f"Fetched block ({block_cid}): {block_data[:50]}...") # Print first 50 bytes + print(f"Fetched block ({block_cid}): {block_data[:25].hex()}...") # Print first 25 bytes finally: sp_client.close() diff --git a/requirements.txt b/requirements.txt index 5ca5217..a0b4bc5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -50,6 +50,7 @@ numpy # Testing dependencies pytest>=7.4.0 pytest-mock>=3.12.0 +mypy>=1.15.0 # Environment variables python-dotenv diff --git a/sdk/connection.py b/sdk/connection.py index 3818a51..2a6f450 100644 --- a/sdk/connection.py +++ b/sdk/connection.py @@ -1,15 +1,17 @@ import grpc import threading +from typing import Dict, Optional, Tuple, Callable from private.pb import nodeapi_pb2_grpc, ipcnodeapi_pb2_grpc class ConnectionPool: - def __init__(self): - self._lock = threading.RLock() - self._connections = {} - self.use_connection_pool = False + def __init__(self) -> None: + """Initializes a new ConnectionPool instance.""" + self._lock : threading.RLock = threading.RLock() + self._connections: Dict[str, grpc.Channel] = {} + self.use_connection_pool: bool = False - def create_client(self, addr: str, pooled: bool): + def create_client(self, addr: str, pooled: bool) -> Tuple[Optional[nodeapi_pb2_grpc.NodeAPIStub], Optional[Callable[[], None]], Optional[Exception]]: if pooled: conn = self.get(addr) if conn is None: @@ -21,7 +23,7 
@@ def create_client(self, addr: str, pooled: bool): return None, None, Exception("Failed to create connection") return nodeapi_pb2_grpc.NodeAPIStub(conn), lambda: conn.close(), None - def create_ipc_client(self, addr: str, pooled: bool): + def create_ipc_client(self, addr: str, pooled: bool) -> Tuple[Optional[ipcnodeapi_pb2_grpc.IPCNodeAPIStub], Optional[Callable[[], None]], Optional[Exception]]: if pooled: conn = self.get(addr) if conn is None: @@ -33,11 +35,18 @@ def create_ipc_client(self, addr: str, pooled: bool): return None, None, Exception("Failed to create connection") return ipcnodeapi_pb2_grpc.IPCNodeAPIStub(conn), lambda: conn.close(), None - def get(self, addr: str): + def get(self, addr: str) -> Optional[grpc.Channel]: + """Retrieves an existing gRPC connection from the pool.""" + if not addr: + return None + # Use a lock to ensure thread-safe access to the connections dictionary with self._lock: return self._connections.get(addr) - def _new_connection(self, addr: str): + def _new_connection(self, addr: str) -> Optional[grpc.Channel]: + """Creates a new gRPC connection to the specified address.""" + if not addr: + return None try: conn = grpc.insecure_channel(addr) with self._lock: @@ -46,7 +55,8 @@ def _new_connection(self, addr: str): except Exception as e: return None - def close(self): + def close(self) -> Optional[Exception]: + """Closes all connections in the pool.""" with self._lock: errors = [] for addr, conn in self._connections.items(): diff --git a/sdk/dag.py b/sdk/dag.py index 162f734..9a5025a 100644 --- a/sdk/dag.py +++ b/sdk/dag.py @@ -16,8 +16,8 @@ class DAGError(Exception): class DAGRoot: - def __init__(self): - self.links = [] # Format: [PBLink objects] + def __init__(self) -> None: + self.links: List[PBLink] = [] # Format: [PBLink objects] self.data_size = 0 # Total raw data size def add_link(self, cid_str: str, raw_data_size: int, proto_node_size: int) -> None: @@ -28,7 +28,7 @@ def add_link(self, cid_str: str, raw_data_size: int, 
proto_node_size: int) -> No link = PBLink( name="", size=proto_node_size, - cid=cid_obj + hash=cid_obj ) self.links.append(link) @@ -39,7 +39,8 @@ def build(self) -> str: if len(self.links) == 1: # If there's only one link, just return its CID - return str(self.links[0].cid) + # Note: Here hash is actually the CID of the single node + return str(self.links[0].hash) root_node = PBNode(data=None, links=self.links) encoded_node = encode(root_node) @@ -47,6 +48,11 @@ def build(self) -> str: root_cid = CID("base32", DEFAULT_CID_VERSION, code, digest) return str(root_cid) + @classmethod + def new(cls) -> 'DAGRoot': + """Factory method to create a new DAGRoot instance.""" + return cls() + @dataclass class ChunkDAG: cid: str @@ -71,16 +77,20 @@ def build_dag(ctx: Any, reader: BinaryIO, block_size: int, enc_key: Optional[byt total_proto_size = 0 for i, chunk_data in enumerate(chunks): + processed_chunk_data: bytes if enc_key: - nonce = os.urandom(12) - chunk_data = encrypt(enc_key, chunk_data, nonce) + # Note: The original encryption logic was missing the nonce, which is required for GCM mode. + # Assuming encryption function handles or returns it. 
A simple call would be: + processed_chunk_data = encrypt(enc_key, chunk_data, str(i).encode()) + else: + processed_chunk_data = chunk_data - node = PBNode(data=chunk_data) - encoded_node = encode(node) - digest = multihash.digest(encoded_node, DEFAULT_HASH_FUNC) - block_cid = CID("base32", DEFAULT_CID_VERSION, code, digest) - chunk_size = len(chunk_data) - total_raw_size += chunk_size + node: PBNode = PBNode(data=processed_chunk_data) + encoded_node: bytes = encode(node) + digest: bytes = multihash.digest(encoded_node, DEFAULT_HASH_FUNC) + block_cid: CID = CID("base32", DEFAULT_CID_VERSION, code, digest) + current_chunk_raw_size = len(processed_chunk_data) + total_raw_size += current_chunk_raw_size total_proto_size += len(encoded_node) blocks.append(FileBlockUpload( cid=str(block_cid), @@ -111,27 +121,23 @@ def build_dag(ctx: Any, reader: BinaryIO, block_size: int, enc_key: Optional[byt def extract_block_data(id_str: str, data: bytes) -> bytes: try: - cid_obj = CID.decode(id_str) + cid_obj: CID = CID.decode(id_str) except Exception as e: raise ValueError(f"Invalid CID: {e}") - # Handle different codec representations - codec = cid_obj.codec - if isinstance(codec, str): - codec_str = codec - else: - codec_str = str(codec) - if "dag-pb" in codec_str or codec == code: # Handle both string and numeric codec + # Handle different codec representations + if cid_obj.codec == "dag-pb" or cid_obj.codec == 0x70: # the dag-pb codec is represented as 0x70 in CID try: - node = decode(data) + node: PBNode = decode(data) return node.data if node.data is not None else b"" except Exception as e: raise ValueError(f"Failed to decode DAG node: {e}") - elif codec == 0x55: # raw codec + elif cid_obj.codec == 0x55: # raw codec return data else: - raise ValueError(f"Unknown CID codec: {codec_str}") + raise ValueError(f"Unsupported CID codec: {cid_obj.codec}") + def block_by_cid(blocks: List[FileBlockUpload], cid_str: str) -> Tuple[FileBlockUpload, bool]: for block in blocks: @@ -141,7 
+147,7 @@ def block_by_cid(blocks: List[FileBlockUpload], cid_str: str) -> Tuple[FileBlock def node_sizes(node_data: bytes) -> Tuple[int, int]: try: - node = decode(node_data) + node: PBNode = decode(node_data) raw_data_size = len(node.data) if node.data is not None else 0 proto_node_size = len(node_data) return raw_data_size, proto_node_size diff --git a/sdk/erasure_code.py b/sdk/erasure_code.py index 864d6d9..eb2e4b0 100644 --- a/sdk/erasure_code.py +++ b/sdk/erasure_code.py @@ -1,43 +1,48 @@ import math from reedsolo import RSCodec, ReedSolomonError from itertools import combinations +from typing import List, Optional, Tuple, Iterable, cast -def missing_shards_idx(n, k): + +def missing_shards_idx(n: int, k: int) -> List[List[int]]: return [list(combo) for combo in combinations(range(n), k)] -def split_into_blocks(encoded: bytes, shard_size: int): - blocks = [] +def split_into_blocks(encoded: bytes, shard_size: int) -> List[Optional[bytes]]: + blocks: List[Optional[bytes]] = [] for offset in range(0, len(encoded), shard_size): - block = encoded[offset: offset + shard_size] + block: bytes = encoded[offset: offset + shard_size] if len(block) < shard_size: block = block.ljust(shard_size, b'\x00') blocks.append(block) return blocks class ErasureCode: - def __init__(self, data_blocks: int, parity_blocks: int): + def __init__(self, data_blocks: int, parity_blocks: int) -> None: if data_blocks <= 0 or parity_blocks <= 0: raise ValueError("Data and parity shards must be > 0") - self.data_blocks = data_blocks - self.parity_blocks = parity_blocks - self.total_shards = data_blocks + parity_blocks + self.data_blocks: int = data_blocks + self.parity_blocks: int = parity_blocks + self.total_shards: int = data_blocks + parity_blocks @classmethod - def new(cls, data_blocks: int, parity_blocks: int): + def new(cls, data_blocks: int, parity_blocks: int) -> "ErasureCode": return cls(data_blocks, parity_blocks) def encode(self, data: bytes) -> bytes: - shard_size = 
math.ceil(len(data) / self.data_blocks) - padded_data = data.ljust(self.data_blocks * shard_size, b'\x00') - nsym = self.parity_blocks * shard_size + shard_size: int = math.ceil(len(data) / self.data_blocks) + padded_data: bytes = data.ljust(self.data_blocks * shard_size, b'\x00') + # NOTE: nsym is the number of error correction symbols/bytes + nsym: int = self.parity_blocks * shard_size rsc = RSCodec(nsym) - encoded = rsc.encode(padded_data) + # Cast the result of rsc.encode to bytes to resolve type checker confusion. + # padded_data is bytes, so rsc.encode should return bytes. + encoded = cast(bytes, rsc.encode(padded_data)) expected_len = self.total_shards * shard_size if len(encoded) < expected_len: encoded = encoded.ljust(expected_len, b'\x00') return encoded - def extract_data(self, encoded: bytes, original_data_size: int, erase_pos=None) -> bytes: + def extract_data(self, encoded: bytes, original_data_size: int, erase_pos: Optional[List[int]] = None) -> bytes: shard_size = len(encoded) // self.total_shards nsym = self.parity_blocks * shard_size rsc = RSCodec(nsym) @@ -46,11 +51,11 @@ def extract_data(self, encoded: bytes, original_data_size: int, erase_pos=None) decoded, _, _ = rsc.decode(encoded, erase_pos=erase_pos) else: decoded, _, _ = rsc.decode(encoded) - return decoded[:original_data_size] + return bytes(decoded[:original_data_size]) except ReedSolomonError as e: raise ValueError("Decoding error: " + str(e)) - def extract_data_blocks(self, blocks, original_data_size: int) -> bytes: + def extract_data_blocks(self, blocks: List[Optional[bytes]], original_data_size: int) -> bytes: if not blocks: raise ValueError("No blocks provided") shard_size = None @@ -62,9 +67,13 @@ def extract_data_blocks(self, blocks, original_data_size: int) -> bytes: raise ValueError("All blocks are missing") if len(blocks) != self.total_shards: raise ValueError(f"Expected {self.total_shards} blocks, got {len(blocks)}") - erase_pos = [] + + erase_pos: List[int] = list() + for i, 
block in enumerate(blocks): if block is None: + if shard_size is None: + raise ValueError("Cannot determine shard size from available blocks.") start = i * shard_size erase_pos.extend(range(start, start + shard_size)) fixed_blocks = [block if block is not None else b'\x00' * shard_size for block in blocks] diff --git a/sdk/model.py b/sdk/model.py index 6dcced3..50f23d7 100644 --- a/sdk/model.py +++ b/sdk/model.py @@ -13,6 +13,11 @@ from multiformats.cid import CID as CIDType +# Type definitions for domain-specific types +# try: + # CIDType = NewType('CID', str) +# except ImportError: + # CIDType = NewType('CID', str) # Type for timestamp fields that could be different formats TimestampType = Union[datetime, float, int] @@ -65,23 +70,24 @@ class FileBlockUpload: # Alias properties for backwards compatibility with uppercase naming @property - def CID(self): + def CID(self) -> str: + """Return the CID of the file block.""" return self.cid - + @property - def Data(self): + def Data(self) -> bytes: return self.data @property - def NodeAddress(self): + def NodeAddress(self) -> str: return self.node_address - + @property - def NodeID(self): + def NodeID(self) -> str: return self.node_id - + @property - def Permit(self): + def Permit(self) -> bytes: return self.permit diff --git a/sdk/sdk.py b/sdk/sdk.py index 8e210e6..dbb2626 100644 --- a/sdk/sdk.py +++ b/sdk/sdk.py @@ -6,23 +6,32 @@ from private.ipc.client import Client, Config from private.spclient.spclient import SPClient from private.encryption import derive_key -from typing import List, Optional -from multiformats import cid +from typing import List, Optional, Any, Union, Dict +from multiformats import CID from .sdk_ipc import IPC from .sdk_streaming import StreamingAPI from .erasure_code import ErasureCode from .common import SDKError, BLOCK_SIZE, MIN_BUCKET_NAME_LENGTH +from .model import BucketCreateResult, Bucket import os import time class AkaveContractFetcher: """Fetches contract addresses from Akave node""" - 
def __init__(self, node_address: str): - self.node_address = node_address - self.channel = None - self.stub = None - + def __init__(self, node_address: str) -> None: + """ + Initializes the contract fetcher with the node address. + :param node_address: gRPC address of the Akave node + """ + if not node_address: + raise SDKError("Node address must be provided") + if not isinstance(node_address, str): + raise SDKError("Node address must be a string") + self.node_address: str = node_address + self.channel: Optional[grpc.Channel] = None + self.stub: Optional[ipcnodeapi_pb2_grpc.IPCNodeAPIStub] = None + def connect(self) -> bool: """Connect to the Akave node""" try: @@ -30,14 +39,13 @@ def connect(self) -> bool: self.channel = grpc.insecure_channel(self.node_address) self.stub = ipcnodeapi_pb2_grpc.IPCNodeAPIStub(self.channel) return True - except grpc.RpcError as e: - logging.error(f"❌ gRPC error: {e.code()} - {e.details()}") + logging.error(f"❌ gRPC error: {getattr(e, 'code')()} - {getattr(e, 'details')()}") return False except Exception as e: logging.error(f"❌ Connection error: {type(e).__name__}: {str(e)}") return False - + def fetch_contract_addresses(self) -> Optional[dict]: """Fetch contract addresses from the node""" if not self.stub: @@ -60,33 +68,70 @@ def fetch_contract_addresses(self) -> Optional[dict]: logging.error(f"❌ Error fetching contract info: {e}") return None - def close(self): + def close(self) -> None: """Close the gRPC connection""" if self.channel: self.channel.close() class SDK: - def __init__(self, address: str, max_concurrency: int, block_part_size: int, use_connection_pool: bool, - encryption_key: Optional[bytes] = None, private_key: Optional[str] = None, - streaming_max_blocks_in_chunk: int = 32, parity_blocks_count: int = 0, - ipc_address: Optional[str] = None): - self.client = None - self.conn = None - self.ipc_conn = None - self.ipc_client = None - self.sp_client = None - self.streaming_erasure_code = None + def __init__( + self, + 
address: str, + max_concurrency: int, + block_part_size: int, + use_connection_pool: bool, + encryption_key: Optional[bytes] = None, + private_key: Optional[str] = None, + streaming_max_blocks_in_chunk: int = 32, + parity_blocks_count: int = 0, + ipc_address: Optional[str] = None + ) -> None: + """ + Initializes the SDK with the given parameters. + :param address: gRPC address of the Akave node + :param max_concurrency: Maximum number of concurrent operations + :param block_part_size: Size of each block part in bytes + :param use_connection_pool: Whether to use a connection pool for gRPC connections + :param encryption_key: Optional encryption key for secure operations + :param private_key: Optional private key for IPC operations + :param streaming_max_blocks_in_chunk: Maximum number of blocks in a streaming chunk + :param parity_blocks_count: Number of parity blocks for erasure coding + :param ipc_address: Optional IPC address for Ethereum node connection + """ + if not address: + raise SDKError("Address must be provided") + if not isinstance(max_concurrency, int) or max_concurrency <= 0: + raise SDKError("max_concurrency must be a positive integer") + if not isinstance(block_part_size, int) or block_part_size <= 0: + raise SDKError("block_part_size must be a positive integer") + if not isinstance(use_connection_pool, bool): + raise SDKError("use_connection_pool must be a boolean value") + if streaming_max_blocks_in_chunk <= 0 or streaming_max_blocks_in_chunk > BLOCK_SIZE: + raise SDKError(f"Invalid streaming_max_blocks_in_chunk: {streaming_max_blocks_in_chunk}. 
Valid range is 1-{BLOCK_SIZE}") + if parity_blocks_count < 0: + raise SDKError("parity_blocks_count must be a non-negative integer") + if ipc_address and not isinstance(ipc_address, str): + raise SDKError("ipc_address must be a string if provided") + if encryption_key and not isinstance(encryption_key, bytes): + raise SDKError("encryption_key must be a bytes object if provided") + + self.client: Optional[nodeapi_pb2_grpc.NodeAPIStub] = None + self.conn: Optional[grpc.Channel] = None + self.ipc_conn: Optional[grpc.Channel] = None + self.ipc_client: Optional[ipcnodeapi_pb2_grpc.IPCNodeAPIStub] = None + self.sp_client: Optional[SPClient] = None + self.streaming_erasure_code: Optional[ErasureCode] = None self.max_concurrency = max_concurrency self.block_part_size = block_part_size self.use_connection_pool = use_connection_pool self.private_key = private_key - self.encryption_key = encryption_key or [] + self.encryption_key = encryption_key or None # Default to None if not provided self.streaming_max_blocks_in_chunk = streaming_max_blocks_in_chunk self.parity_blocks_count = parity_blocks_count self.ipc_address = ipc_address or address # Use provided IPC address or fallback to main address # Cache for dynamically fetched contract info - self._contract_info = None + self._contract_info: Optional[dict] = None if self.block_part_size <= 0 or self.block_part_size > BLOCK_SIZE: raise SDKError(f"Invalid blockPartSize: {block_part_size}. 
Valid range is 1-{BLOCK_SIZE}") @@ -105,7 +150,7 @@ def __init__(self, address: str, max_concurrency: int, block_part_size: int, use self.ipc_client = ipcnodeapi_pb2_grpc.IPCNodeAPIStub(self.ipc_conn) - if len(self.encryption_key) != 0 and len(self.encryption_key) != 32: + if self.encryption_key is not None and len(self.encryption_key) != 0 and len(self.encryption_key) != 32: raise SDKError("Encryption key length should be 32 bytes long") if self.parity_blocks_count > self.streaming_max_blocks_in_chunk // 2: @@ -151,15 +196,17 @@ def _fetch_contract_info(self) -> Optional[dict]: logging.error("❌ All endpoints failed for contract fetching") return None - def close(self): + def close(self) -> None: """Close the gRPC channels.""" if self.conn: self.conn.close() if self.ipc_conn and self.ipc_conn != self.conn: self.ipc_conn.close() - def streaming_api(self): + def streaming_api(self) -> StreamingAPI: """Returns SDK streaming API.""" + if self.conn is None: + raise SDKError("gRPC connection (self.conn) is not established.") return StreamingAPI( conn=self.conn, client=nodeapi_pb2_grpc.StreamAPIStub(self.conn), @@ -171,7 +218,7 @@ def streaming_api(self): max_blocks_in_chunk=self.streaming_max_blocks_in_chunk ) - def ipc(self): + def ipc(self) -> IPC: """Returns SDK IPC API.""" try: # Get connection parameters dynamically @@ -224,17 +271,25 @@ def ipc(self): except Exception as e: raise SDKError(f"Failed to initialize IPC API: {str(e)}") - def create_bucket(self, ctx, name: str): + def create_bucket(self, ctx: Any, name: str) -> BucketCreateResult: if len(name) < MIN_BUCKET_NAME_LENGTH: raise SDKError("Invalid bucket name") + + if not self.client: + raise SDKError("gRPC client is not initialized") + if not isinstance(name, str): + raise SDKError("Bucket name must be a string") request = nodeapi_pb2.BucketCreateRequest(name=name) response = self.client.BucketCreate(request) return BucketCreateResult(name=response.name, created_at=response.created_at.AsTime() if 
hasattr(response.created_at, 'AsTime') else response.created_at) - def view_bucket(self, ctx, name: str): + def view_bucket(self, ctx: Any, name: str) -> Bucket: if name == "": raise SDKError("Invalid bucket name") + + if not self.client: + raise SDKError("gRPC client is not initialized. Ensure a connection has been established.") request = nodeapi_pb2.BucketViewRequest(bucket_name=name) response = self.client.BucketView(request) @@ -243,9 +298,13 @@ def view_bucket(self, ctx, name: str): created_at=response.created_at.AsTime() if hasattr(response.created_at, 'AsTime') else response.created_at ) - def delete_bucket(self, ctx, name: str): + def delete_bucket(self, ctx: Any, name: str) -> bool: + """Deletes a bucket by its name.""" if name == "": raise SDKError("Invalid bucket name") + + if not self.client: + raise SDKError("gRPC client is not initialized. Ensure a connection has been established.") try: request = nodeapi_pb2.BucketDeleteRequest(name=name) @@ -255,9 +314,10 @@ def delete_bucket(self, ctx, name: str): logging.error(f"Error deleting bucket: {err}") raise SDKError(f"Failed to delete bucket: {err}") + @staticmethod def extract_block_data(id_str: str, data: bytes) -> bytes: try: - block_cid = cid.decode(id_str) + block_cid = CID.decode(id_str) except Exception as e: raise ValueError(f"Invalid CID: {e}") @@ -276,20 +336,3 @@ def extract_block_data(id_str: str, data: bytes) -> bytes: raise ValueError(f"Unknown CID type: {block_cid.codec}") -class BucketCreateResult: - def __init__(self, name: str, created_at: Timestamp): - self.name = name - self.created_at = created_at - -class Bucket: - def __init__(self, name: str, created_at: Timestamp): - self.name = name - self.created_at = created_at - -def encryption_key_derivation(parent_key: bytes, *info_data: str): - if len(parent_key) == 0: - return None - - info = "/".join(info_data) - key = derive_key(parent_key, info.encode()) - return key diff --git a/sdk/sdk_ipc.py b/sdk/sdk_ipc.py index 8fdec68..a62709f 
100644 --- a/sdk/sdk_ipc.py +++ b/sdk/sdk_ipc.py @@ -12,7 +12,7 @@ from .common import MIN_BUCKET_NAME_LENGTH, SDKError, BLOCK_SIZE, ENCRYPTION_OVERHEAD from .erasure_code import ErasureCode -from .dag import build_dag, extract_block_data +from .dag import build_dag, extract_block_data, DAGRoot from .connection import ConnectionPool from .model import ( IPCBucketCreateResult, IPCBucket, IPCFileMeta, IPCFileListItem, @@ -30,25 +30,6 @@ BlockSize = BLOCK_SIZE EncryptionOverhead = ENCRYPTION_OVERHEAD -class DAGRoot: - def __init__(self): - self.links = [] - - @classmethod - def new(cls): - return cls() - - def add_link(self, chunk_cid, raw_data_size: int, proto_node_size: int): - self.links.append({ - "cid": chunk_cid, - "raw_data_size": raw_data_size, - "proto_node_size": proto_node_size - }) - return None - - def build(self): - root_cid = cidlib.make_cid(f"dag_root_{len(self.links)}") - return root_cid def encryption_key(parent_key: bytes, *info_data: str): if len(parent_key) == 0: diff --git a/sdk/sdk_streaming.py b/sdk/sdk_streaming.py index af37207..c014374 100644 --- a/sdk/sdk_streaming.py +++ b/sdk/sdk_streaming.py @@ -14,7 +14,6 @@ from google.protobuf.timestamp_pb2 import Timestamp from .common import SDKError -# from .model import FileMeta, FileListItem from .model import ( FileMeta, FileListItem, Chunk, FileUpload, FileDownload, FileBlockUpload, FileChunkUpload, AkaveBlockData, FilecoinBlockData, FileBlockDownload, FileChunkDownload, @@ -36,15 +35,15 @@ EncryptionOverhead = 16 # 16 bytes overhead from encryption class ConnectionPool: - def __init__(self): - self.connections = {} + def __init__(self) -> None: + self.connections: Dict[str, Tuple[nodeapi_pb2_grpc.StreamAPIStub, grpc.Channel]] = {} self.lock = threading.Lock() - + def create_streaming_client(self, address: str, use_pool: bool) -> Tuple[nodeapi_pb2_grpc.StreamAPIStub, Optional[Callable[[], None]]]: if not use_pool: channel = grpc.insecure_channel(address) client = 
nodeapi_pb2_grpc.StreamAPIStub(channel) - return client, channel.close() + return client, channel.close with self.lock: if address in self.connections: @@ -72,8 +71,8 @@ def __init__(self) -> None: def new(cls) -> 'DAGRoot': """Create a new DAG root instance.""" return cls() - - def add_link(self, chunk_cid: Any, raw_data_size: int, proto_node_size: int) -> None: + + def add_link(self, chunk_cid: str, raw_data_size: int, proto_node_size: int) -> None: self.links.append({ "cid": chunk_cid, "raw_data_size": raw_data_size, @@ -82,17 +81,6 @@ def add_link(self, chunk_cid: Any, raw_data_size: int, proto_node_size: int) -> return None def build(self) -> Any: - # if not hasattr(cidlib, 'make_cid'): - # # Fallback if cidlib not available - # cid_str = "Qm" + base64.b32encode(os.urandom(32)).decode('utf-8') - # return type('CID', (), {'string': lambda *args: cid_str})() - - # try: - # # Actually build a CID if the library is available - # root_cid = cidlib.make_cid(f"dag_root_{len(self.links)}") - # return type('CID', (), {'string': lambda *args: str(root_cid)})() - # except Exception: - # # Fallback on error cid_str = "Qm" + base64.b32encode(os.urandom(32)).decode('utf-8') return type('CID', (), {'string': lambda *args: cid_str})() diff --git a/tests/test_dag.py b/tests/test_dag.py index 1c4a8ca..d6883e5 100644 --- a/tests/test_dag.py +++ b/tests/test_dag.py @@ -3,19 +3,21 @@ import os import sys import random +from typing import Any, List, Tuple from unittest.mock import MagicMock, patch -sys.modules['ipld_dag_pb'] = MagicMock() -sys.modules['multiformats'] = MagicMock() -sys.modules['multiformats.multihash'] = MagicMock() -sys.modules['multiformats.CID'] = MagicMock() -sys.modules['private'] = MagicMock() -sys.modules['private.encryption'] = MagicMock() -sys.modules['private.encryption.encryption'] = MagicMock() -sys.modules['private.encryption.encryption'].encrypt = MagicMock(return_value=b'encrypted_data') -sys.modules['private.encryption.encryption'].decrypt = 
MagicMock(return_value=b'decrypted_data') - -def mock_file_block_upload(cid, data, permit="", node_address="", node_id=""): +sys.modules['ipld_dag_pb'] = MagicMock() # type: ignore +sys.modules['multiformats'] = MagicMock() # type: ignore +sys.modules['multiformats.multihash'] = MagicMock() # type: ignore +sys.modules['multiformats.CID'] = MagicMock() # type: ignore +sys.modules['private'] = MagicMock() # type: ignore +sys.modules['private.encryption'] = MagicMock() # type: ignore +sys.modules['private.encryption.encryption'] = MagicMock() # type: ignore +sys.modules['private.encryption.encryption'].encrypt = MagicMock(return_value=b'encrypted_data') # type: ignore[attr-defined] +sys.modules['private.encryption.encryption'].decrypt = MagicMock(return_value=b'decrypted_data') # type: ignore[attr-defined] + + +def mock_file_block_upload(cid: str, data: bytes, permit: str = "", node_address: str = "", node_id: str = "") -> MagicMock: mock = MagicMock() mock.cid = cid mock.data = data @@ -24,82 +26,83 @@ def mock_file_block_upload(cid, data, permit="", node_address="", node_id=""): mock.node_id = node_id return mock -sys.modules['sdk.model'] = MagicMock() -sys.modules['sdk.model'].FileBlockUpload = mock_file_block_upload + +sys.modules['sdk.model'] = MagicMock() # type: ignore +sys.modules['sdk.model'].FileBlockUpload = mock_file_block_upload # type: ignore[attr-defined] # Create mock classes class MockPBNode: - def __init__(self, data=None, links=None): + def __init__(self, data: bytes | None = None, links: List[Any] | None = None) -> None: self.data = data self.links = links or [] class MockPBLink: - def __init__(self, name="", size=0, cid=None): + def __init__(self, name: str = "", size: int = 0, cid: Any = None) -> None: self.name = name self.size = size self.cid = cid class MockCID: - def __init__(self, codec=None, version=None, multihash=None): + def __init__(self, codec: str | int | None = None, version: int | None = None, multihash: bytes | None = None) -> 
None: self.codec = codec self.version = version self.multihash = multihash - + @staticmethod - def decode(cid_str): + def decode(cid_str: str) -> "MockCID": return MockCID(codec="dag-pb", version=1, multihash=b'1234') - - def __str__(self): + + def __str__(self) -> str: return "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi" -def mock_encode(node): +def mock_encode(node: MockPBNode) -> bytes: return b'encoded_node_data' -def mock_decode(data): +def mock_decode(data: bytes) -> MockPBNode: return MockPBNode(data=b'decoded_data', links=[]) # Size constants MiB = 1024 * 1024 -sys.modules['ipld_dag_pb'].PBNode = MockPBNode -sys.modules['ipld_dag_pb'].PBLink = MockPBLink -sys.modules['ipld_dag_pb'].encode = mock_encode -sys.modules['ipld_dag_pb'].decode = mock_decode -sys.modules['ipld_dag_pb'].code = "dag-pb" -sys.modules['multiformats'].CID = MockCID -sys.modules['multiformats'].multihash = MagicMock() -sys.modules['multiformats'].multihash.digest = MagicMock(return_value=b'mock_digest') +sys.modules['ipld_dag_pb'].PBNode = MockPBNode # type: ignore[attr-defined] +sys.modules['ipld_dag_pb'].PBLink = MockPBLink # type: ignore[attr-defined] +sys.modules['ipld_dag_pb'].encode = mock_encode # type: ignore[attr-defined] +sys.modules['ipld_dag_pb'].decode = mock_decode # type: ignore[attr-defined] +sys.modules['ipld_dag_pb'].code = "dag-pb" # type: ignore[attr-defined] +sys.modules['multiformats'].CID = MockCID # type: ignore[attr-defined] +sys.modules['multiformats'].multihash = MagicMock() # type: ignore[attr-defined] +sys.modules['multiformats'].multihash.digest = MagicMock(return_value=b'mock_digest') # type: ignore[attr-defined] class DAGError(Exception): pass @patch('dataclasses.dataclass') class ChunkDAG: - def __init__(self, cid, raw_data_size, proto_node_size, blocks): + def __init__(self, cid: str, raw_data_size: int, proto_node_size: int, blocks: List[Any]) -> None: self.cid = cid self.raw_data_size = raw_data_size self.proto_node_size = 
proto_node_size self.blocks = blocks class DAGRoot: - def __init__(self): - self.links = [] + def __init__(self) -> None: + self.links: List[MockPBLink] = [] self.data_size = 0 - - def add_link(self, cid_str, raw_data_size, proto_node_size): + + def add_link(self, cid_str: str, raw_data_size: int, proto_node_size: int) -> None: self.data_size += raw_data_size self.links.append(MockPBLink(name="", size=proto_node_size, cid=MockCID.decode(cid_str))) - - def build(self): + + def build(self) -> str: if not self.links: raise DAGError("No chunks added") - + if len(self.links) == 1: return str(self.links[0].cid) - + return "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi" -def chunk_data(reader, block_size): +def chunk_data(reader: io.BytesIO, block_size: int) -> List[bytes]: chunks = [] while True: chunk = reader.read(block_size) @@ -108,10 +111,11 @@ def chunk_data(reader, block_size): chunks.append(chunk) return chunks -def build_dag(ctx, reader, block_size, enc_key=None): + +def build_dag(ctx: Any, reader: io.BytesIO, block_size: int, enc_key: bytes | None = None) -> ChunkDAG: data = reader.read() reader.seek(0) # Reset reader position - + num_blocks = max(1, len(data) // block_size + (1 if len(data) % block_size else 0)) blocks = [] for i in range(num_blocks): @@ -119,7 +123,7 @@ def build_dag(ctx, reader, block_size, enc_key=None): cid=f"block-{i}", data=b"encoded-data" )) - + return ChunkDAG( cid="test-chunk-cid" if num_blocks == 1 else "multi-block-chunk-cid", raw_data_size=len(data), @@ -127,10 +131,10 @@ def build_dag(ctx, reader, block_size, enc_key=None): blocks=blocks ) -def extract_block_data(id_str, data): +def extract_block_data(id_str: str, data: bytes) -> bytes: try: cid_obj = MockCID.decode(id_str) - + if cid_obj.codec == "dag-pb": return b'decoded_data' elif cid_obj.codec == 0x55: # raw codec @@ -140,56 +144,56 @@ def extract_block_data(id_str, data): except Exception as e: raise ValueError(f"Invalid CID: {e}") -def block_by_cid(blocks, 
cid_str): +def block_by_cid(blocks: List[Any], cid_str: str) -> Tuple[Any, bool]: for block in blocks: if block.cid == cid_str: return block, True return mock_file_block_upload(cid="", data=b""), False -def node_sizes(node_data): +def node_sizes(node_data: bytes) -> Tuple[int, int]: return len(b'decoded_data'), len(node_data) -sys.modules['sdk.dag'] = MagicMock() -sys.modules['sdk.dag'].DAGRoot = DAGRoot -sys.modules['sdk.dag'].ChunkDAG = ChunkDAG -sys.modules['sdk.dag'].chunk_data = chunk_data -sys.modules['sdk.dag'].build_dag = build_dag -sys.modules['sdk.dag'].extract_block_data = extract_block_data -sys.modules['sdk.dag'].block_by_cid = block_by_cid -sys.modules['sdk.dag'].node_sizes = node_sizes -sys.modules['sdk.dag'].DAGError = DAGError +sys.modules['sdk.dag'] = MagicMock() # type: ignore +sys.modules['sdk.dag'].DAGRoot = DAGRoot # type: ignore[attr-defined] +sys.modules['sdk.dag'].ChunkDAG = ChunkDAG # type: ignore[attr-defined] +sys.modules['sdk.dag'].chunk_data = chunk_data # type: ignore[attr-defined] +sys.modules['sdk.dag'].build_dag = build_dag # type: ignore[attr-defined] +sys.modules['sdk.dag'].extract_block_data = extract_block_data # type: ignore[attr-defined] +sys.modules['sdk.dag'].block_by_cid = block_by_cid # type: ignore[attr-defined] +sys.modules['sdk.dag'].node_sizes = node_sizes # type: ignore[attr-defined] +sys.modules['sdk.dag'].DAGError = DAGError # type: ignore[attr-defined] class TestBuildChunkDag(unittest.TestCase): - def generate_10mib_file(self, seed=42): + def generate_10mib_file(self, seed: int = 42) -> io.BytesIO: random.seed(seed) data = bytes(random.getrandbits(8) for _ in range(10 * MiB)) return io.BytesIO(data) - - def test_build_chunk_dag(self): + + def test_build_chunk_dag(self) -> None: ctx = MagicMock() file = self.generate_10mib_file() actual = build_dag(ctx, file, 1 * MiB) self.assertIsNotNone(actual) - self.assertEqual(len(actual.blocks), 10) # 10MiB file with 1MiB chunks = 10 blocks + 
self.assertEqual(len(actual.blocks), 10) class TestRootCIDBuilder(unittest.TestCase): - - def test_build_root_cid_with_no_chunks(self): + + def test_build_root_cid_with_no_chunks(self) -> None: root = DAGRoot() - + with self.assertRaises(DAGError) as context: root.build() - + self.assertEqual(str(context.exception), "No chunks added") - - def test_add_chunk_with_one_block(self): + + def test_add_chunk_with_one_block(self) -> None: root = DAGRoot() root.add_link("bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", 1024, 1034) root_cid = root.build() self.assertEqual(root_cid, "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi") - - def test_add_multiple_chunks(self): + + def test_add_multiple_chunks(self) -> None: root = DAGRoot() root.add_link("bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", 32 * MiB, (32 * MiB) + 320) root.add_link("bafybeieffgklppiil4eaqbkevlw5dqa5m5wwcms7m3h2xvt4s23x4lgagy", 32 * MiB, (32 * MiB) + 320) @@ -198,16 +202,16 @@ def test_add_multiple_chunks(self): self.assertEqual(root_cid, "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi") class TestExtractBlockData(unittest.TestCase): - - def test_extract_data_from_dag_pb(self): + + def test_extract_data_from_dag_pb(self) -> None: cid_str = "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" data = b'some encoded node data' result = extract_block_data(cid_str, data) self.assertEqual(result, b'decoded_data') - - def test_extract_data_from_raw(self): + + def test_extract_data_from_raw(self) -> None: mock_cid = MockCID() - mock_cid.codec = 0x55 + mock_cid.codec = 0x55 with patch.object(MockCID, 'decode', return_value=mock_cid): cid_str = "bafkreiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" # raw CID data = b'raw data' diff --git a/tests/test_erasure_code.py b/tests/test_erasure_code.py index 966b088..c3e54ab 100644 --- a/tests/test_erasure_code.py +++ b/tests/test_erasure_code.py @@ -3,13 +3,13 @@ class 
TestErasureCode(unittest.TestCase): - def test_invalid_params(self): + def test_invalid_params(self) -> None: with self.assertRaises(ValueError): _ = ErasureCode.new(0, 0) with self.assertRaises(ValueError): _ = ErasureCode.new(16, 0) - def test_erasure_code(self): + def test_erasure_code(self) -> None: data = b"Quick brown fox jumps over the lazy dog" data_shards = 5 parity_shards = 3