Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select a commit. Hold Shift and click to select a range.
f74da58
deletions and their fixes
d4v1d03 Nov 23, 2025
f35b898
Merge pull request #90 from d4v1d03/dev
Abhay-2811 Nov 24, 2025
8561e8a
feat: add httpext HTTP client helpers
irajgill Nov 26, 2025
0853894
feat: add httpext HTTP client helpers
irajgill Nov 26, 2025
758f410
feat: add httpext HTTP client helpers
irajgill Nov 26, 2025
006a3ef
feat: add httpext HTTP client helper
irajgill Nov 27, 2025
4a48902
feat: add httpext HTTP client helper
irajgill Nov 27, 2025
12d82da
added cids with working test
d4v1d03 Nov 27, 2025
a7a814e
deletion
d4v1d03 Nov 27, 2025
4bb5624
added eip712 changes with working tests
d4v1d03 Dec 1, 2025
904996e
Merge pull request #93 from d4v1d03/dev
Abhay-2811 Dec 4, 2025
067b439
Merge pull request #92 from irajgill/dev
Abhay-2811 Dec 4, 2025
f539777
added batch client files
d4v1d03 Dec 8, 2025
86b0579
added block parser
d4v1d03 Dec 8, 2025
218f34c
fixed client
d4v1d03 Dec 8, 2025
aef1c7c
fixed protobuf files
d4v1d03 Dec 8, 2025
738dc7c
added pdptest and retry directories
d4v1d03 Dec 8, 2025
e083a9c
Merge pull request #94 from d4v1d03/dev
Abhay-2811 Dec 8, 2025
7fc5f64
fixed sdk directory
d4v1d03 Dec 15, 2025
f068535
Merge branch 'akave-ai:dev' into dev
d4v1d03 Dec 15, 2025
dee418e
sdk.py updated
d4v1d03 Dec 15, 2025
34a7036
removed unnecessary protobufs
d4v1d03 Jan 17, 2026
76668bf
ready for v0.4.4
d4v1d03 Jan 18, 2026
35ebfbd
tests success
d4v1d03 Jan 21, 2026
a0403c8
Merge pull request #95 from d4v1d03/dev
Abhay-2811 Jan 21, 2026
01d54f7
CI fix
d4v1d03 Feb 1, 2026
4070483
Merge branch 'akave-ai:dev' into dev
d4v1d03 Feb 1, 2026
c809467
CI testing
d4v1d03 Feb 1, 2026
1c6db7d
CI fixes
d4v1d03 Feb 1, 2026
9774273
CI fix
d4v1d03 Feb 1, 2026
baba88c
removed opentelemetry
d4v1d03 Feb 1, 2026
ce0ec28
removed specific versions for grpcio and tools
d4v1d03 Feb 1, 2026
8fd5b91
made python >=3.9 available for CI
d4v1d03 Feb 1, 2026
b542b62
Merge pull request #98 from d4v1d03/dev
Abhay-2811 Feb 2, 2026
04eee36
refactor: unit test fixture
d4v1d03 Feb 4, 2026
f51e7c7
refactor: unit test fixed
d4v1d03 Feb 4, 2026
e332006
Merge pull request #100 from d4v1d03/dev
Abhay-2811 Feb 5, 2026
1e87b45
Add tests for chunk DAG root node creation and UnixFS data extraction…
yashksaini-coder Feb 16, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ name: CI

on:
push:
branches: [ main, develop ]
branches: [ main, dev ]
pull_request:
branches: [ main, develop ]
branches: [ main, dev ]

jobs:
test:
Expand All @@ -13,7 +13,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.9', '3.10', '3.11', '3.12']

steps:
- name: Checkout code
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/code-quality.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ name: Code Quality

on:
push:
branches: [ main, develop ]
branches: [ main, dev ]
pull_request:
branches: [ main, develop ]
branches: [ main, dev ]

jobs:
lint:
Expand Down
36 changes: 0 additions & 36 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -189,42 +189,6 @@ finally:
print("Connection closed")
```

### Streaming API Usage

```python
from akavesdk import SDK, SDKError

config = SDKConfig(
address="connect.akave.ai:5500",
max_concurrency=10,
block_part_size=1 * 1024 * 1024, # 1MB
use_connection_pool=True
)

# Initialize the SDK
sdk = SDK(config)

try:
# Get streaming API
streaming = sdk.streaming_api()

# List files in a bucket
files = streaming.list_files({}, "my-bucket")
for file in files:
print(f"File: {file.name}, Size: {file.size} bytes")

# Get file info
file_info = streaming.file_info({}, "my-bucket", "my-file.txt")
print(f"File info: {file_info}")
except SDKError as e:
# handle sdk exception
pass
except Exception as e:
# handle generic exception
pass
finally:
sdk.close()
```

## File Size Requirements

Expand Down
9 changes: 4 additions & 5 deletions akavesdk/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,20 @@
import os

# Add private directory to path
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PRIVATE_PATH = os.path.join(PROJECT_ROOT, "private")
if PRIVATE_PATH not in sys.path:
sys.path.append(PRIVATE_PATH)

# Import and expose main SDK classes
from sdk.sdk import SDK, BucketCreateResult, Bucket, SDKError, SDKConfig
from sdk.sdk_streaming import StreamingAPI
from sdk.sdk_ipc import IPC
from sdk.erasure_code import ErasureCode
from private.cids import verify_raw, verify, CIDError


# Make SDKError appear under akavesdk in tracebacks
SDKError.__module__ = "akavesdk"

# Define what gets imported with "from akavesdk import *"
__all__ = ["SDK", "SDKError", "SDKConfig", "StreamingAPI", "IPC",
"BucketCreateResult", "Bucket", "ErasureCode"]
__all__ = ["SDK", "SDKError", "SDKConfig", "IPC",
"BucketCreateResult", "Bucket", "verify_raw", "verify", "CIDError"]
6 changes: 2 additions & 4 deletions akavesdk/akavesdk.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,7 @@
# Import SDK classes using absolute imports
from sdk.sdk import SDK, BucketCreateResult, Bucket
from sdk.config import SDKError
from sdk.sdk_streaming import StreamingAPI
from sdk.sdk_ipc import IPC
from sdk.erasure_code import ErasureCode
# Export all classes
__all__ = ["SDK", "SDKError", "StreamingAPI", "IPC",
"BucketCreateResult", "Bucket", "ErasureCode"]
__all__ = ["SDK", "SDKError", "IPC",
"BucketCreateResult", "Bucket"]
5 changes: 5 additions & 0 deletions private/cids/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@

from .cids import verify_raw, verify, CIDError

__all__ = ["verify_raw", "verify", "CIDError"]

67 changes: 67 additions & 0 deletions private/cids/cids.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
from typing import Union

try:
from multiformats import CID, multihash
MULTIFORMATS_AVAILABLE = True
except ImportError:
MULTIFORMATS_AVAILABLE = False
CID = None


class CIDError(Exception):
    """Raised when a CID cannot be decoded or does not match its data."""


def verify_raw(provided_cid: str, data: bytes) -> None:
    """Verify that *data* hashes to the CID given in string form.

    Args:
        provided_cid: CID in its canonical string encoding (v0 or v1).
        data: Raw bytes the CID is expected to describe.

    Raises:
        CIDError: If the multiformats library is unavailable, the CID
            string cannot be decoded, or the CID does not match *data*.
    """
    if not MULTIFORMATS_AVAILABLE:
        raise CIDError("multiformats library is not available")

    try:
        parsed_cid = CID.decode(provided_cid)
    except Exception as e:
        # Chain the underlying decode error so the original traceback
        # is preserved for debugging (raise ... from e).
        raise CIDError(f"failed to decode provided CID: {e}") from e

    verify(parsed_cid, data)


def verify(c: 'CID', data: bytes) -> None:
    """Check that *data* hashes to the CID *c*; raise CIDError on mismatch."""
    if not MULTIFORMATS_AVAILABLE:
        raise CIDError("multiformats library is not available")

    recomputed = _calculate_standard_cid(c, data)
    if recomputed != c:
        raise CIDError(
            f"CID mismatch: provided {str(c)}, calculated {str(recomputed)}"
        )
)


def _calculate_standard_cid(c: 'CID', data: bytes) -> 'CID':
    """Recompute the CID of *data* using the version/codec/hash of *c*.

    Returns a fresh CID built from *data* so the caller can compare it
    against the provided CID.

    Raises:
        CIDError: If multiformats is unavailable, hashing fails, or the
            CID version is neither 0 nor 1.
    """
    if not MULTIFORMATS_AVAILABLE:
        raise CIDError("multiformats library is not available")

    version = c.version
    codec = c.codec

    if hasattr(c, 'hashfun'):
        # hashfun is a multicodec object, convert to string
        # NOTE(review): this parses the repr text of the hash function
        # (e.g. "multihash.get('sha2-256')") to recover its name — fragile
        # if the multiformats repr format ever changes; verify against the
        # installed multiformats version.
        hash_code = str(c.hashfun).replace("multihash.get('", "").replace("')", "")
    else:
        hash_code = "sha2-256"

    try:
        digest = multihash.digest(data, hash_code)
    except Exception as e:
        raise CIDError(f"failed to create multihash: {e}")

    if version == 0:
        # CIDv0 is always base58btc + dag-pb by definition.
        return CID("base58btc", 0, "dag-pb", digest)
    elif version == 1:
        # CIDv1 defaults to base32; reuse the provided CID's base if exposed.
        base = "base32"
        if hasattr(c, 'base'):
            base = c.base
        return CID(base, 1, codec, digest)
    else:
        raise CIDError(f"unsupported CID version: {version}")

136 changes: 136 additions & 0 deletions private/cids/cids_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
import pytest
import secrets
import sys
import os

project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if project_root not in sys.path:
sys.path.insert(0, project_root)

try:
from multiformats import CID, multihash
MULTIFORMATS_AVAILABLE = True
except ImportError:
MULTIFORMATS_AVAILABLE = False
pytest.skip("multiformats library not available", allow_module_level=True)

try:
from private.cids.cids import verify_raw, verify, CIDError
except ImportError:
from .cids import verify_raw, verify, CIDError


def test_verify_raw_valid_cidv0_matches():
    """A CIDv0 derived from the payload itself must verify cleanly."""
    payload = secrets.token_bytes(128)
    legacy_cid = CID("base58btc", 0, "dag-pb", multihash.digest(payload, "sha2-256"))
    verify_raw(str(legacy_cid), payload)


def test_verify_raw_valid_cidv1_matches():
    """A CIDv1 derived from the payload itself must verify cleanly."""
    payload = secrets.token_bytes(128)
    good_cid = CID("base32", 1, "dag-pb", multihash.digest(payload, "sha2-256"))
    verify_raw(str(good_cid), payload)


def test_verify_raw_cid_mismatch():
    """verify_raw rejects data that does not hash to the given CID."""
    payload = secrets.token_bytes(128)
    good_cid = CID("base32", 1, "dag-pb", multihash.digest(payload, "sha2-256"))

    with pytest.raises(CIDError) as err:
        verify_raw(str(good_cid), b"different data")

    assert "CID mismatch" in str(err.value)


def test_verify_raw_invalid_cid_format():
    """A malformed CID string surfaces a decode error, not a mismatch."""
    payload = secrets.token_bytes(128)

    with pytest.raises(CIDError) as err:
        verify_raw("invalid-cid", payload)

    assert "failed to decode provided CID" in str(err.value)


def test_verify_raw_empty_data():
    """An empty payload verifies against the CID of zero bytes."""
    payload = b""
    # CID computed over the empty byte string.
    cid_of_empty = CID("base32", 1, "dag-pb", multihash.digest(payload, "sha2-256"))
    verify_raw(str(cid_of_empty), payload)


def test_verify_valid_cidv1_matches():
    """verify accepts a CIDv1 object built from the same payload."""
    payload = secrets.token_bytes(127)
    good_cid = CID("base32", 1, "dag-pb", multihash.digest(payload, "sha2-256"))
    verify(good_cid, payload)


def test_verify_valid_cidv0_matches():
    """verify accepts a CIDv0 object built from the same payload."""
    payload = secrets.token_bytes(127)
    legacy_cid = CID("base58btc", 0, "dag-pb", multihash.digest(payload, "sha2-256"))
    verify(legacy_cid, payload)


def test_verify_cid_mismatch():
    """verify rejects data that does not hash to the given CID object."""
    payload = secrets.token_bytes(127)
    good_cid = CID("base32", 1, "dag-pb", multihash.digest(payload, "sha2-256"))

    with pytest.raises(CIDError) as err:
        verify(good_cid, b"different data")

    assert "CID mismatch" in str(err.value)


def test_verify_different_hash_algorithms():
    """verify works for CIDs built with sha2-256 and sha2-512 alike."""
    payload = secrets.token_bytes(64)

    for algo in ("sha2-256", "sha2-512"):
        cid_for_algo = CID("base32", 1, "dag-pb", multihash.digest(payload, algo))
        verify(cid_for_algo, payload)


def test_verify_different_codecs():
    """verify works for both dag-pb and raw codecs over the same digest."""
    payload = secrets.token_bytes(64)
    digest = multihash.digest(payload, "sha2-256")

    verify(CID("base32", 1, "dag-pb", digest), payload)
    verify(CID("base32", 1, "raw", digest), payload)


def test_verify_large_data():
    """verify handles a 1 MiB payload without issue."""
    payload = secrets.token_bytes(1024 * 1024)
    big_cid = CID("base32", 1, "dag-pb", multihash.digest(payload, "sha2-256"))
    verify(big_cid, payload)

5 changes: 5 additions & 0 deletions private/eip712/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@

from .eip712 import Domain, TypedData, sign, recover_signer_address

__all__ = ["Domain", "TypedData", "sign", "recover_signer_address"]

Loading