diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..ad469dc
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,41 @@
+name: "Publish verda to PyPI"
+
+on:
+ release:
+ types: [published]
+
+jobs:
+ deploy:
+ runs-on: ubuntu-24.04
+
+ environment:
+ name: pypi
+
+ permissions:
+ id-token: write
+ contents: read
+
+ steps:
+ - uses: actions/checkout@v5
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v7
+ with:
+ version: "0.9.11"
+
+ - name: Set up Python
+ run: uv python install
+
+ - name: Build
+ run: uv build
+
+ # Check that basic features work and that no crucial files were left out of the package
+ - name: Smoke test (wheel)
+ run: uv run --isolated --no-project --with dist/*.whl --with "responses==0.25.8" tests/smoke.py
+
+ - name: Smoke test (source distribution)
+ run: uv run --isolated --no-project --with dist/*.tar.gz --with "responses==0.25.8" tests/smoke.py
+
+ # We use trusted publishing, so no API tokens are necessary: https://docs.astral.sh/uv/guides/integration/github/#publishing-to-pypi
+ - name: Publish
+ run: uv publish
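Both smoke-test steps run `tests/smoke.py`, which is not part of this diff. As a rough sketch of what such a script might look like (the token endpoint path and response shape below are assumptions, not taken from the repository), it could exercise the built package against a mocked API via the pinned `responses` package:

```python
# Hypothetical tests/smoke.py: import the built package and construct a client
# against a mocked API, so CI needs no real credentials.
import responses

from verda import VerdaClient


@responses.activate
def main() -> None:
    # Assumed token endpoint and response shape; adjust to the real API.
    responses.post(
        'https://api.verda.com/v1/oauth2/token',
        json={'access_token': 'dummy', 'refresh_token': 'dummy'},
    )
    client = VerdaClient('client-id', 'client-secret')
    assert client.constants is not None
    print('smoke test passed')


if __name__ == '__main__':
    main()
```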
diff --git a/.github/workflows/publish_package.yml b/.github/workflows/publish_package.yml
deleted file mode 100644
index 087fc8b..0000000
--- a/.github/workflows/publish_package.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-# Based on https://docs.astral.sh/uv/guides/integration/github/#publishing-to-pypi
-name: Publish to PyPI
-
-on:
- release:
- types: [published]
-
-jobs:
- deploy:
- runs-on: ubuntu-24.04
-
- environment:
- name: pypi
-
- permissions:
- id-token: write
- contents: read
-
- steps:
- - uses: actions/checkout@v5
-
- - name: Install uv
- uses: astral-sh/setup-uv@v6
- with:
- version: "0.9.5"
-
- - name: Set up Python
- run: uv python install
-
- - name: Build
- run: uv build
-
- - name: Publish
- # TODO(shamrin) switch to trusted publishing and remove secrets https://docs.astral.sh/uv/guides/integration/github/#publishing-to-pypi
- env:
- UV_PUBLISH_USERNAME: ${{ secrets.PYPI_USERNAME }}
- UV_PUBLISH_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
- run: uv publish
diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml
index d1a9669..2e24c62 100644
--- a/.github/workflows/unit_tests.yml
+++ b/.github/workflows/unit_tests.yml
@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-24.04
strategy:
matrix:
- python-version: ['3.11', '3.12', '3.13', '3.14']
+ python-version: ['3.10', '3.11', '3.12', '3.13', '3.14']
steps:
- uses: actions/checkout@v5
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2fc63ee..0484918 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,42 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
+### Highlights
+
+- The `datacrunch` package is now `verda`, because the company [changed its name to Verda](https://verda.com/blog/datacrunch-is-changing-its-name-to-verda). The original `datacrunch` package isn't going anywhere and will continue to track `verda` releases.
+
+### Deprecated
+
+- `DataCrunchClient` and `datacrunch` are deprecated; please change your imports to `VerdaClient` and `verda`.
+
+Was:
+
+```shell
+uv add datacrunch
+pip3 install datacrunch
+```
+
+```python
+from datacrunch import DataCrunchClient
+
+datacrunch = DataCrunchClient(...)
+datacrunch.instances.create(...)
+```
+
+Now:
+
+```shell
+uv add verda
+pip3 install verda
+```
+
+```python
+from verda import VerdaClient
+
+verda = VerdaClient(...)
+verda.instances.create(...)
+```
+
### Added
- Added shared filesystem (SFS) type constant and example
@@ -14,6 +50,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed
- Refactor `instance_types.py` to use dataclass
+- Restored support for Python 3.10
+- Default API base URL is now `https://api.verda.com/v1`
## [1.16.0] - 2025-10-27
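One migration note implied by the new default base URL: the README's development section shows a `base_url` keyword, so code that must keep talking to the legacy endpoint during the transition could presumably pin it explicitly. A minimal sketch, assuming the old host keeps serving the v1 API:

```python
from verda import VerdaClient

# Hedged sketch: pin the legacy endpoint while migrating; drop base_url
# to use the new default (https://api.verda.com/v1).
client = VerdaClient('your-id', 'your-secret', base_url='https://api.datacrunch.io/v1')
```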
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 7c53670..37b1771 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -2,7 +2,7 @@
Any contributions are welcome!
-Open new issues at https://github.com/DataCrunch-io/datacrunch-python/issues.
+Open new issues at https://github.com/verda-cloud/sdk-python/issues.
You can open pull requests by following the steps:
@@ -10,13 +10,13 @@ You can open pull requests by following the steps:
Prerequisite: install [`uv`](https://docs.astral.sh/uv/).
-1. Fork the `datacrunch-python` repo on GitHub.
+1. Fork this repo on GitHub.
2. Clone your fork locally:
```bash
- git clone git@github.com:{your_username}/datacrunch-python.git
- cd datacrunch-python
+ git clone git@github.com:{your_username}/sdk-python.git
+ cd sdk-python
```
3. Set up local environment and install dependencies:
@@ -80,6 +80,6 @@ To release a new version:
git push --tags
```
-4. [Draft and publish](https://github.com/DataCrunch-io/datacrunch-python/releases) a new release.
+4. [Draft and publish](https://github.com/verda-cloud/sdk-python/releases) a new release.
-5. Check that package is automatically published to [PyPI](https://pypi.org/project/datacrunch/) via [GitHub action](https://github.com/DataCrunch-io/datacrunch-python/actions/workflows/publish_package.yml).
+5. Check that the package is automatically published to [PyPI](https://pypi.org/project/verda/) via [GitHub action](https://github.com/verda-cloud/sdk-python/actions/workflows/publish.yml).
diff --git a/LICENSE b/LICENSE
index 88ee068..cd3a5d8 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2020 DataCrunch Oy
+Copyright (c) 2025 Verda Cloud Oy
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 9a393a5..1e3231d 100644
--- a/README.md
+++ b/README.md
@@ -1,18 +1,18 @@
-# DataCrunch Python SDK
+# Verda Python SDK
-[![Unit Tests](https://github.com/DataCrunch-io/datacrunch-python/workflows/Unit%20Tests/badge.svg)](https://github.com/DataCrunch-io/datacrunch-python/actions?query=workflow%3A%22Unit+Tests%22+branch%3Amaster)
-[![Code Style](https://github.com/DataCrunch-io/datacrunch-python/workflows/Code%20Style/badge.svg)](https://github.com/DataCrunch-io/datacrunch-python/actions?query=workflow%3A%22Code+Style%22+branch%3Amaster)
-[![codecov](https://codecov.io/gh/DataCrunch-io/datacrunch-python/branch/master/graph/badge.svg)](https://codecov.io/gh/DataCrunch-io/datacrunch-python)
+[![Unit Tests](https://github.com/verda-cloud/sdk-python/workflows/Unit%20Tests/badge.svg)](https://github.com/verda-cloud/sdk-python/actions?query=workflow%3A%22Unit+Tests%22+branch%3Amaster)
+[![Code Style](https://github.com/verda-cloud/sdk-python/workflows/Code%20Style/badge.svg)](https://github.com/verda-cloud/sdk-python/actions?query=workflow%3A%22Code+Style%22+branch%3Amaster)
+[![codecov](https://codecov.io/gh/verda-cloud/sdk-python/branch/master/graph/badge.svg)](https://codecov.io/gh/verda-cloud/sdk-python)
 [![Documentation Status](https://readthedocs.org/projects/datacrunch-python/badge/?version=latest)](https://datacrunch-python.readthedocs.io/en/latest/)
-[![License](https://img.shields.io/github/license/DataCrunch-io/datacrunch-python)](https://github.com/DataCrunch-io/datacrunch-python/blob/master/LICENSE)
-[![PyPI version](https://img.shields.io/pypi/v/datacrunch)](https://pypi.org/project/datacrunch/)
-[![PyPI pyversions](https://img.shields.io/pypi/pyversions/datacrunch)](https://pypi.org/project/datacrunch/)
+[![License](https://img.shields.io/github/license/verda-cloud/sdk-python)](https://github.com/verda-cloud/sdk-python/blob/master/LICENSE)
+[![PyPI version](https://img.shields.io/pypi/v/verda)](https://pypi.org/project/verda/)
+[![PyPI pyversions](https://img.shields.io/pypi/pyversions/verda)](https://pypi.org/project/verda/)
-The official [DataCrunch.io](https://datacrunch.io) Python SDK.
+The official [Verda](https://verda.com) (formerly DataCrunch) Python SDK.
The SDK's documentation is available on [ReadTheDocs](https://datacrunch-python.readthedocs.io/en/latest/)
-DataCrunch's Public API documentation [is available here](https://api.datacrunch.io/v1/docs).
+Verda's Public API documentation [is available here](https://api.verda.com/v1/docs).
## Getting Started - Using the SDK:
@@ -20,13 +20,13 @@ DataCrunch's Public API documentation [is available here](https://api.datacrunch
```bash
# via pip
- pip3 install datacrunch
+ pip3 install verda
# via uv
- uv add datacrunch
+ uv add verda
```
-- Generate your client credentials - [instructions in the public API docs](https://api.datacrunch.io/v1/docs#description/quick-start-guide).
+- Generate your client credentials - [instructions in the public API docs](https://api.verda.com/v1/docs#description/quick-start-guide).
- Add your client id and client secret to an environment variable (don't want it to be hardcoded):
@@ -34,11 +34,11 @@ DataCrunch's Public API documentation [is available here](https://api.datacrunch
Linux (bash):
```bash
- export DATACRUNCH_CLIENT_ID=YOUR_ID_HERE
- export DATACRUNCH_CLIENT_SECRET=YOUR_SECRET_HERE
+ export VERDA_CLIENT_ID=YOUR_ID_HERE
+ export VERDA_CLIENT_SECRET=YOUR_SECRET_HERE
```
-- To enable sending inference requests from SDK you must generate an inference key - [Instructions on inference authorization](https://docs.datacrunch.io/inference/authorization)
+- To enable sending inference requests from the SDK, you must generate an inference key - [Instructions on inference authorization](https://docs.verda.com/inference/authorization)
- Add your inference key to an environment variable
@@ -46,7 +46,7 @@ DataCrunch's Public API documentation [is available here](https://api.datacrunch
Linux (bash):
```bash
- export DATACRUNCH_INFERENCE_KEY=YOUR_API_KEY_HERE
+ export VERDA_INFERENCE_KEY=YOUR_API_KEY_HERE
```
Other platforms:
@@ -58,27 +58,27 @@ DataCrunch's Public API documentation [is available here](https://api.datacrunch
```python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get credentials from environment variables
- CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
- # Create datcrunch client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ # Create client
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get all SSH keys
- ssh_keys = [key.id for key in datacrunch.ssh_keys.get()]
+ ssh_keys = [key.id for key in verda.ssh_keys.get()]
# Create a new instance
- instance = datacrunch.instances.create(instance_type='1V100.6V',
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys,
- hostname='example',
- description='example instance')
+ instance = verda.instances.create(instance_type='1V100.6V',
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys,
+ hostname='example',
+ description='example instance')
# Delete instance
- datacrunch.instances.action(instance.id, datacrunch.constants.instance_actions.DELETE)
+ verda.instances.action(instance.id, verda.constants.instance_actions.DELETE)
```
More examples can be found in the `/examples` folder or in the [documentation](https://datacrunch-python.readthedocs.io/en/latest/).
@@ -92,8 +92,8 @@ Prerequisite: install [`uv`](https://docs.astral.sh/uv/).
Clone the repository, create local environment and install dependencies:
```bash
- git clone git@github.com:DataCrunch-io/datacrunch-python.git
- cd datacrunch-python
+ git clone git@github.com:verda-cloud/sdk-python.git
+ cd sdk-python
uv sync
```
@@ -117,13 +117,13 @@ Create a file in the root directory of the project:
```python
# example.py
-from datacrunch.datacrunch import DataCrunchClient
+from verda.verda import VerdaClient
CLIENT_SECRET = 'secret'
CLIENT_ID = 'your-id'
-# Create datacrunch client
-datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET, base_url='http://localhost:3001/v1')
+# Create client
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET, base_url='http://localhost:3001/v1')
```
Run it:
@@ -153,4 +153,4 @@ uv run ruff format
## Contact
-You can [contact us here](https://datacrunch.io/contact), or open an issue in the repo.
+You can [contact us here](https://verda.com/contact), or open an issue in the repo.
diff --git a/datacrunch/__init__.py b/datacrunch/__init__.py
deleted file mode 100644
index fa76d9f..0000000
--- a/datacrunch/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from datacrunch._version import __version__
-from datacrunch.datacrunch import DataCrunchClient
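This deletes the legacy entry point, while the changelog above promises that the `datacrunch` package will keep tracking `verda` releases. The shim that makes that possible is not part of this diff; a minimal sketch of how such a compatibility `datacrunch/__init__.py` might look (the `__version__` re-export is an assumption mirroring the deleted module):

```python
# Hypothetical datacrunch/__init__.py for the legacy PyPI package:
# re-export the new client under the old name and warn on import.
import warnings

from verda import VerdaClient
from verda import __version__  # assumed to be re-exported by verda, as before

warnings.warn(
    "The 'datacrunch' package is deprecated; install 'verda' and use VerdaClient.",
    DeprecationWarning,
    stacklevel=2,
)

# Keep `from datacrunch import DataCrunchClient` working.
DataCrunchClient = VerdaClient
```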
diff --git a/docs/source/API.rst b/docs/source/API.rst
index 456719e..331205b 100644
--- a/docs/source/API.rst
+++ b/docs/source/API.rst
@@ -1,22 +1,22 @@
API Reference
=============
-Main DataCrunch Client
-----------------------
+Main Verda Client
+-----------------
-.. autoclass:: datacrunch.DataCrunchClient
+.. autoclass:: verda.VerdaClient
:members:
API Exception
-------------
-.. autoclass:: datacrunch.exceptions.APIException
+.. autoclass:: verda.exceptions.APIException
:members:
Constants
---------
-.. autoclass:: datacrunch.constants.Constants
+.. autoclass:: verda.constants.Constants
:members:
Services API
diff --git a/docs/source/api/services/balance.rst b/docs/source/api/services/balance.rst
index 374a04b..e310ae1 100644
--- a/docs/source/api/services/balance.rst
+++ b/docs/source/api/services/balance.rst
@@ -1,8 +1,8 @@
Balance
=======
-.. autoclass:: datacrunch.balance.balance.BalanceService
+.. autoclass:: verda.balance.balance.BalanceService
:members:
-.. autoclass:: datacrunch.balance.balance.Balance
+.. autoclass:: verda.balance.balance.Balance
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/containers.rst b/docs/source/api/services/containers.rst
index b33e387..c92fa2d 100644
--- a/docs/source/api/services/containers.rst
+++ b/docs/source/api/services/containers.rst
@@ -1,9 +1,9 @@
Containers
==========
-.. autoclass:: datacrunch.containers.containers.ContainersService
+.. autoclass:: verda.containers.containers.ContainersService
:members:
-.. autoclass:: datacrunch.containers.containers.Container
+.. autoclass:: verda.containers.containers.Container
:members:
diff --git a/docs/source/api/services/images.rst b/docs/source/api/services/images.rst
index 8b0b834..621f17e 100644
--- a/docs/source/api/services/images.rst
+++ b/docs/source/api/services/images.rst
@@ -1,8 +1,8 @@
Images
======
-.. autoclass:: datacrunch.images.images.ImagesService
+.. autoclass:: verda.images.images.ImagesService
:members:
-.. autoclass:: datacrunch.images.images.Image
+.. autoclass:: verda.images.images.Image
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/instance_types.rst b/docs/source/api/services/instance_types.rst
index a5c6cc5..1ee1541 100644
--- a/docs/source/api/services/instance_types.rst
+++ b/docs/source/api/services/instance_types.rst
@@ -1,8 +1,8 @@
Instance Types
==============
-.. autoclass:: datacrunch.instance_types.instance_types.InstanceTypesService
+.. autoclass:: verda.instance_types.instance_types.InstanceTypesService
:members:
-.. autoclass:: datacrunch.instance_types.instance_types.InstanceType
+.. autoclass:: verda.instance_types.instance_types.InstanceType
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/instances.rst b/docs/source/api/services/instances.rst
index b97ab1c..b4c1409 100644
--- a/docs/source/api/services/instances.rst
+++ b/docs/source/api/services/instances.rst
@@ -1,8 +1,8 @@
Instances
=========
-.. autoclass:: datacrunch.instances.instances.InstancesService
+.. autoclass:: verda.instances.instances.InstancesService
:members:
-.. autoclass:: datacrunch.instances.instances.Instance
+.. autoclass:: verda.instances.instances.Instance
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/ssh_keys.rst b/docs/source/api/services/ssh_keys.rst
index 645b03a..ae9da7f 100644
--- a/docs/source/api/services/ssh_keys.rst
+++ b/docs/source/api/services/ssh_keys.rst
@@ -1,8 +1,8 @@
SSH Keys
========
-.. autoclass:: datacrunch.ssh_keys.ssh_keys.SSHKeysService
+.. autoclass:: verda.ssh_keys.ssh_keys.SSHKeysService
:members:
-.. autoclass:: datacrunch.ssh_keys.ssh_keys.SSHKey
+.. autoclass:: verda.ssh_keys.ssh_keys.SSHKey
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/startup_scripts.rst b/docs/source/api/services/startup_scripts.rst
index 148f37e..d9b89ed 100644
--- a/docs/source/api/services/startup_scripts.rst
+++ b/docs/source/api/services/startup_scripts.rst
@@ -1,8 +1,8 @@
Startup Scripts
===============
-.. autoclass:: datacrunch.startup_scripts.startup_scripts.StartupScriptsService
+.. autoclass:: verda.startup_scripts.startup_scripts.StartupScriptsService
:members:
-.. autoclass:: datacrunch.startup_scripts.startup_scripts.StartupScript
+.. autoclass:: verda.startup_scripts.startup_scripts.StartupScript
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/volume_types.rst b/docs/source/api/services/volume_types.rst
index e540026..cdf70cc 100644
--- a/docs/source/api/services/volume_types.rst
+++ b/docs/source/api/services/volume_types.rst
@@ -1,8 +1,8 @@
Volume Types
============
-.. autoclass:: datacrunch.volume_types.volume_types.VolumeTypesService
+.. autoclass:: verda.volume_types.volume_types.VolumeTypesService
:members:
-.. autoclass:: datacrunch.volume_types.volume_types.VolumeType
+.. autoclass:: verda.volume_types.volume_types.VolumeType
:members:
\ No newline at end of file
diff --git a/docs/source/api/services/volumes.rst b/docs/source/api/services/volumes.rst
index dbff07e..b6c850a 100644
--- a/docs/source/api/services/volumes.rst
+++ b/docs/source/api/services/volumes.rst
@@ -1,8 +1,8 @@
Volumes
=======
-.. autoclass:: datacrunch.volumes.volumes.VolumesService
+.. autoclass:: verda.volumes.volumes.VolumesService
:members:
-.. autoclass:: datacrunch.volumes.volumes.Volume
+.. autoclass:: verda.volumes.volumes.Volume
:members:
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 09297e1..e829e6d 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -22,12 +22,12 @@
# -- Project information -----------------------------------------------------
current_year = datetime.datetime.now().year
-project = 'DataCrunch Python SDK'
-copyright = f'{current_year}, DataCrunch.io' # noqa: A001
-author = 'DataCrunch.io'
+project = 'Verda Python SDK'
+copyright = f'{current_year}, Verda Cloud Oy' # noqa: A001
+author = 'Verda Cloud'
try:
- release = pkg_version('datacrunch')
+ release = pkg_version('verda')
except PackageNotFoundError:
release = '0.0.0+dev'
@@ -118,7 +118,7 @@
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
-htmlhelp_basename = 'DataCrunch-Python-SDKdoc'
+htmlhelp_basename = 'Verda-Python-SDKdoc'
# -- Options for LaTeX output ------------------------------------------------
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 4345a56..36651bd 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -5,11 +5,11 @@ In all of the examples, we store the client secret in an environment variable.
To create an environment variable (linux), type in the terminal with your client secret value::
- export DATACRUNCH_CLIENT_SECRET=Z4CZq02rdwdB7ISV0k4Z2gtwAFKiyvr2U1l0KDIeYi
+ export VERDA_CLIENT_SECRET=Z4CZq02rdwdB7ISV0k4Z2gtwAFKiyvr2U1l0KDIeYi
For other platforms check https://en.wikipedia.org/wiki/Environment_variable
-All the examples code can be also found in the repository `Examples folder <https://github.com/DataCrunch-io/datacrunch-python/tree/master/examples>`_
+All of the example code can also be found in the repository `Examples folder <https://github.com/verda-cloud/sdk-python/tree/master/examples>`_
.. toctree::
:maxdepth: 3
diff --git a/docs/source/examples/advanced_create_instance.rst b/docs/source/examples/advanced_create_instance.rst
index fc04494..c51b4e2 100644
--- a/docs/source/examples/advanced_create_instance.rst
+++ b/docs/source/examples/advanced_create_instance.rst
@@ -4,8 +4,8 @@ Advanced Create Instance
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
- from datacrunch.exceptions import APIException
+ from verda import VerdaClient
+ from verda.exceptions import APIException
"""
In this hypothetical example, we check if we have enough balance
@@ -13,7 +13,7 @@ Advanced Create Instance
If there's not enough balance, we deploy a 4V100.20V instance.
This example uses the balance service to check the current balance,
- the instace_types service to check instance type details (price per hour)
+ the instance_types service to check instance type details (price per hour)
We also perform other basic tasks such as creating the client and adding a new SSH key.
"""
@@ -26,27 +26,27 @@ Advanced Create Instance
DURATION = 24 * 7 # one week
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
try:
- # Create datcrunch client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ # Create client
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create new SSH key
public_key = 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAI0qq2Qjt5GPi7DKdcnBHOkvk8xNsG9dA607tnWagOkHC test_key'
- ssh_key = datacrunch.ssh_keys.create('my test key', public_key)
+ ssh_key = verda.ssh_keys.create('my test key', public_key)
# Get all SSH keys
- ssh_keys = datacrunch.ssh_keys.get()
+ ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = list(map(lambda ssh_key: ssh_key.id, ssh_keys))
# Get our current balance
- balance = datacrunch.balance.get()
+ balance = verda.balance.get()
print(balance.amount)
# Get instance types
- instance_types = datacrunch.instance_types.get()
+ instance_types = verda.instance_types.get()
# Deploy 8V instance if enough balance for a week, otherwise deploy a 4V
for instance_details in instance_types:
@@ -55,21 +55,21 @@ Advanced Create Instance
if price_per_hour * DURATION < balance.amount:
# Deploy a new 8V instance
- instance = datacrunch.instances.create(instance_type=INSTANCE_TYPE_8V,
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys_ids,
- hostname='example',
- description='large instance'
- os_volume={
- "name": "Large OS volume",
- "size": 95
- })
+ instance = verda.instances.create(instance_type=INSTANCE_TYPE_8V,
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys_ids,
+ hostname='example',
+ description='large instance',
+ os_volume={
+ "name": "Large OS volume",
+ "size": 95,
+ })
else:
# Deploy a new 4V instance
- instance = datacrunch.instances.create(instance_type=INSTANCE_TYPE_4V,
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys_ids,
- hostname='example',
- description='medium instance')
+ instance = verda.instances.create(instance_type=INSTANCE_TYPE_4V,
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys_ids,
+ hostname='example',
+ description='medium instance')
except APIException as exception:
print(exception)
diff --git a/docs/source/examples/containers/compute_resources.rst b/docs/source/examples/containers/compute_resources.rst
index bc39356..26f84f2 100644
--- a/docs/source/examples/containers/compute_resources.rst
+++ b/docs/source/examples/containers/compute_resources.rst
@@ -1,7 +1,7 @@
Compute Resources
=================
-This example demonstrates how to list and manage compute resources for containers in DataCrunch.
+This example demonstrates how to list and manage compute resources for containers in Verda.
.. literalinclude:: ../../../../examples/containers/compute_resources_example.py
:language: python
diff --git a/docs/source/examples/containers/deployments.rst b/docs/source/examples/containers/deployments.rst
index f14446f..e671009 100644
--- a/docs/source/examples/containers/deployments.rst
+++ b/docs/source/examples/containers/deployments.rst
@@ -1,7 +1,7 @@
Container Deployments
=====================
-This example demonstrates how to create, manage, and monitor container deployments in DataCrunch.
+This example demonstrates how to create, manage, and monitor container deployments in Verda.
.. literalinclude:: ../../../../examples/containers/container_deployments_example.py
:language: python
diff --git a/docs/source/examples/containers/environment_variables.rst b/docs/source/examples/containers/environment_variables.rst
index 0566783..b65ec81 100644
--- a/docs/source/examples/containers/environment_variables.rst
+++ b/docs/source/examples/containers/environment_variables.rst
@@ -1,7 +1,7 @@
Environment Variables
=====================
-This example shows how to manage environment variables for container deployments in DataCrunch.
+This example shows how to manage environment variables for container deployments in Verda.
.. literalinclude:: ../../../../examples/containers/environment_variables_example.py
:language: python
diff --git a/docs/source/examples/containers/fileset_secrets.rst b/docs/source/examples/containers/fileset_secrets.rst
index dc2baee..8ba1890 100644
--- a/docs/source/examples/containers/fileset_secrets.rst
+++ b/docs/source/examples/containers/fileset_secrets.rst
@@ -1,7 +1,7 @@
Fileset Secrets
===============
-This example shows how to manage fileset secrets for container deployments in DataCrunch.
+This example shows how to manage fileset secrets for container deployments in Verda.
Fileset secrets are a way to mount a directory with files into a container.
.. literalinclude:: ../../../../examples/containers/fileset_secret_example.py
diff --git a/docs/source/examples/containers/index.rst b/docs/source/examples/containers/index.rst
index 7492b2d..324ce2f 100644
--- a/docs/source/examples/containers/index.rst
+++ b/docs/source/examples/containers/index.rst
@@ -1,7 +1,7 @@
Container Examples
==================
-This section contains examples demonstrating how to work with containers in DataCrunch.
+This section contains examples demonstrating how to work with containers in Verda.
.. toctree::
:maxdepth: 1
diff --git a/docs/source/examples/containers/registry_credentials.rst b/docs/source/examples/containers/registry_credentials.rst
index a2c4070..02f8d6e 100644
--- a/docs/source/examples/containers/registry_credentials.rst
+++ b/docs/source/examples/containers/registry_credentials.rst
@@ -1,7 +1,7 @@
Registry Credentials
====================
-This example demonstrates how to manage container registry credentials in DataCrunch.
+This example demonstrates how to manage container registry credentials in Verda.
.. literalinclude:: ../../../../examples/containers/registry_credentials_example.py
:language: python
diff --git a/docs/source/examples/containers/scaling.rst b/docs/source/examples/containers/scaling.rst
index 4c00829..515fb24 100644
--- a/docs/source/examples/containers/scaling.rst
+++ b/docs/source/examples/containers/scaling.rst
@@ -1,7 +1,7 @@
Update Deployment Scaling
=========================
-This example shows how to update and manage the scaling of container deployments in DataCrunch.
+This example shows how to update and manage the scaling of container deployments in Verda.
.. literalinclude:: ../../../../examples/containers/update_deployment_scaling_example.py
:language: python
diff --git a/docs/source/examples/containers/secrets.rst b/docs/source/examples/containers/secrets.rst
index 4b76865..85e32e8 100644
--- a/docs/source/examples/containers/secrets.rst
+++ b/docs/source/examples/containers/secrets.rst
@@ -1,7 +1,7 @@
Secrets Management
==================
-This example shows how to manage secrets for container deployments in DataCrunch.
+This example shows how to manage secrets for container deployments in Verda.
.. literalinclude:: ../../../../examples/containers/secrets_example.py
:language: python
diff --git a/docs/source/examples/containers/sglang.rst b/docs/source/examples/containers/sglang.rst
index c204115..fc7dde2 100644
--- a/docs/source/examples/containers/sglang.rst
+++ b/docs/source/examples/containers/sglang.rst
@@ -1,7 +1,7 @@
SGLang Deployment
=================
-This example demonstrates how to deploy and manage SGLang applications in DataCrunch.
+This example demonstrates how to deploy and manage SGLang applications in Verda.
.. literalinclude:: ../../../../examples/containers/sglang_deployment_example.py
:language: python
diff --git a/docs/source/examples/instance_actions.rst b/docs/source/examples/instance_actions.rst
index c59690b..a452f42 100644
--- a/docs/source/examples/instance_actions.rst
+++ b/docs/source/examples/instance_actions.rst
@@ -5,55 +5,55 @@ Instance Actions
import os
import time
- from datacrunch import DataCrunchClient
- from datacrunch.exceptions import APIException
+ from verda import VerdaClient
+ from verda.exceptions import APIException
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
+ # Create client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get all SSH keys
- ssh_keys = datacrunch.ssh_keys.get()
+ ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = list(map(lambda ssh_key: ssh_key.id, ssh_keys))
# Create a new 1V100.6V instance
- instance = datacrunch.instances.create(instance_type='1V100.6V',
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys_ids,
- hostname='example',
- description='example instance')
+ instance = verda.instances.create(instance_type='1V100.6V',
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys_ids,
+ hostname='example',
+ description='example instance')
print(instance.id)
# Try to shutdown instance right away,
# encounter an error (because it's still provisioning)
try:
- datacrunch.instances.action(instance.id, datacrunch.actions.SHUTDOWN)
+ verda.instances.action(instance.id, verda.actions.SHUTDOWN)
except APIException as exception:
print(exception) # we were too eager...
# Wait until instance is running (check every 30sec), only then shut it down
- while(instance.status != datacrunch.instance_status.RUNNING):
+ while(instance.status != verda.instance_status.RUNNING):
time.sleep(30)
- instance = datacrunch.instances.get_by_id(instance.id)
+ instance = verda.instances.get_by_id(instance.id)
# Shutdown!
try:
- datacrunch.instances.action(instance.id, datacrunch.actions.SHUTDOWN)
+ verda.instances.action(instance.id, verda.actions.SHUTDOWN)
except APIException as exception:
print(exception) # no exception this time
# Wait until instance is offline (check every 30sec), only then hibernate
- while(instance.status != datacrunch.instance_status.OFFLINE):
+ while(instance.status != verda.instance_status.OFFLINE):
time.sleep(30)
- instance = datacrunch.instances.get_by_id(instance.id)
+ instance = verda.instances.get_by_id(instance.id)
# Hibernate the instance
try:
- datacrunch.instances.action(instance.id, datacrunch.actions.HIBERNATE)
+ verda.instances.action(instance.id, verda.actions.HIBERNATE)
except APIException as exception:
print(exception)
diff --git a/docs/source/examples/instances_and_volumes.rst b/docs/source/examples/instances_and_volumes.rst
index d3852e1..c8886a6 100644
--- a/docs/source/examples/instances_and_volumes.rst
+++ b/docs/source/examples/instances_and_volumes.rst
@@ -4,67 +4,67 @@ Instances and Volumes
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
+ # Create client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get some volume type constants
- NVMe = datacrunch.constants.volume_types.NVMe
- HDD = datacrunch.constants.volume_types.HDD
+ NVMe = verda.constants.volume_types.NVMe
+ HDD = verda.constants.volume_types.HDD
EXISTING_OS_VOLUME_ID = '81e45bf0-5da2-412b-97d7-c20a7564fca0'
EXAMPLE_VOLUME_ID = '225dde24-ae44-4787-9224-2b9f56f44394'
EXAMPLE_INSTANCE_ID = '1eeabba4-caf7-4b4a-9143-0107034cc7f5'
# Get all SSH keys
- ssh_keys = datacrunch.ssh_keys.get()
+ ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = list(map(lambda ssh_key: ssh_key.id, ssh_keys))
# Create instance with extra attached volumes
- instance_with_extra_volumes = datacrunch.instances.create(instance_type='1V100.6V',
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys,
- hostname='example',
- description='example instance',
- volumes=[
- {"type": HDD, "name": "volume-1", "size": 95},
- {"type": NVMe, "name": "volume-2", "size": 95}
- ])
+ instance_with_extra_volumes = verda.instances.create(instance_type='1V100.6V',
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys,
+ hostname='example',
+ description='example instance',
+ volumes=[
+ {"type": HDD, "name": "volume-1", "size": 95},
+ {"type": NVMe, "name": "volume-2", "size": 95},
+ ])
# Create instance with custom OS volume size and name
- instance_with_custom_os_volume = datacrunch.instances.create(instance_type='1V100.6V',
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys,
- hostname='example',
- description='example instance',
- os_volume={
- "name": "OS volume",
- "size": 95
- })
+ instance_with_custom_os_volume = verda.instances.create(instance_type='1V100.6V',
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys,
+ hostname='example',
+ description='example instance',
+ os_volume={
+ "name": "OS volume",
+ "size": 95,
+ })
# Create instance with existing OS volume as an image
- instance_with_existing_os_volume = datacrunch.instances.create(instance_type='1V100.6V',
- image=EXISTING_OS_VOLUME_ID,
- ssh_key_ids=ssh_keys,
- hostname='example',
- description='example instance')
+ instance_with_existing_os_volume = verda.instances.create(instance_type='1V100.6V',
+ image=EXISTING_OS_VOLUME_ID,
+ ssh_key_ids=ssh_keys,
+ hostname='example',
+ description='example instance')
# Delete instance AND OS volume (the rest of the volumes would be detached)
- datacrunch.instances.action(instance_id=EXAMPLE_INSTANCE_ID,
- action=datacrunch.constants.instance_actions.DELETE)
+ verda.instances.action(instance_id=EXAMPLE_INSTANCE_ID,
+ action=verda.constants.instance_actions.DELETE)
# Delete instance WITHOUT deleting the OS volume (will detach all volumes of the instance)
- datacrunch.instances.action(instance_id=EXAMPLE_INSTANCE_ID,
- action=datacrunch.constants.instance_actions.DELETE,
- volume_ids=[])
+ verda.instances.action(instance_id=EXAMPLE_INSTANCE_ID,
+ action=verda.constants.instance_actions.DELETE,
+ volume_ids=[])
- # Delete instance and one of it's volumes (will delete one volume, detach the rest)
+ # Delete instance and one of its volumes (will delete one volume, detach the rest)
- datacrunch.instances.action(instance_id=EXAMPLE_INSTANCE_ID,
- action=datacrunch.constants.instance_actions.DELETE,
- volume_ids=[EXAMPLE_VOLUME_ID])
+ verda.instances.action(instance_id=EXAMPLE_INSTANCE_ID,
+ action=verda.constants.instance_actions.DELETE,
+ volume_ids=[EXAMPLE_VOLUME_ID])
diff --git a/docs/source/examples/simple_create_instance.rst b/docs/source/examples/simple_create_instance.rst
index c845321..4ef52ac 100644
--- a/docs/source/examples/simple_create_instance.rst
+++ b/docs/source/examples/simple_create_instance.rst
@@ -4,26 +4,26 @@ Simple Create Instance
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
+ # Create client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get all SSH keys id's
- ssh_keys = datacrunch.ssh_keys.get()
+ ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = list(map(lambda ssh_key: ssh_key.id, ssh_keys))
# Create a new instance
- instance = datacrunch.instances.create(instance_type='1V100.6V',
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys_ids,
- hostname='example',
- description='example instance')
+ instance = verda.instances.create(instance_type='1V100.6V',
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys_ids,
+ hostname='example',
+ description='example instance')
# Delete instance
- datacrunch.instances.action(instance.id, datacrunch.actions.DELETE)
+ verda.instances.action(instance.id, verda.actions.DELETE)
\ No newline at end of file
diff --git a/docs/source/examples/ssh_keys.rst b/docs/source/examples/ssh_keys.rst
index ddb1f13..82cc2ca 100644
--- a/docs/source/examples/ssh_keys.rst
+++ b/docs/source/examples/ssh_keys.rst
@@ -4,18 +4,18 @@ SSH Keys
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
+ # Create client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create new SSH key
public_key = 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAI0qq2Qjt5GPi7DKdcnBHOkvk8xNsG9dA607tnWagOkHC test_key'
- ssh_key = datacrunch.ssh_keys.create('my test key', public_key)
+ ssh_key = verda.ssh_keys.create('my test key', public_key)
# Print new key id, name, public key
print(ssh_key.id)
@@ -23,10 +23,10 @@ SSH Keys
print(ssh_key.public_key)
# Get all keys
- all_ssh_keys = datacrunch.ssh_keys.get()
+ all_ssh_keys = verda.ssh_keys.get()
# Get single key by id
- some_ssh_key = datacrunch.ssh_keys.get_by_id(ssh_key.id)
+ some_ssh_key = verda.ssh_keys.get_by_id(ssh_key.id)
# Delete ssh key by id
- datacrunch.ssh_keys.delete_by_id(ssh_key.id)
+ verda.ssh_keys.delete_by_id(ssh_key.id)
diff --git a/docs/source/examples/startup_scripts.rst b/docs/source/examples/startup_scripts.rst
index 1f0f9b2..96db370 100644
--- a/docs/source/examples/startup_scripts.rst
+++ b/docs/source/examples/startup_scripts.rst
@@ -4,14 +4,14 @@ Startup Scripts
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
+ # Create client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create new startup script
bash_script = """echo this is a test script for serious cat business
@@ -22,7 +22,7 @@ Startup Scripts
# download a cat picture
curl https://http.cat/200 --output cat.jpg
"""
- script = datacrunch.startup_scripts.create("catty businness", bash_script)
+ script = verda.startup_scripts.create("catty business", bash_script)
# Print new startup script id, name, script code
print(script.id)
@@ -30,10 +30,10 @@ Startup Scripts
print(script.script)
# Get all startup scripts
- all_scripts = datacrunch.startup_scripts.get()
+ all_scripts = verda.startup_scripts.get()
# Get a single startup script by id
- some_script = datacrunch.startup_scripts.get_by_id(script.id)
+ some_script = verda.startup_scripts.get_by_id(script.id)
# Delete startup script by id
- datacrunch.startup_scripts.delete_by_id(script.id)
+ verda.startup_scripts.delete_by_id(script.id)
diff --git a/docs/source/examples/storage_volumes.rst b/docs/source/examples/storage_volumes.rst
index c71eb75..7377a24 100644
--- a/docs/source/examples/storage_volumes.rst
+++ b/docs/source/examples/storage_volumes.rst
@@ -4,39 +4,39 @@ Storage Volumes
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
+ # Create client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get some volume type constants
- NVMe = datacrunch.constants.volume_types.NVMe
- HDD = datacrunch.constants.volume_types.HDD
+ NVMe = verda.constants.volume_types.NVMe
+ HDD = verda.constants.volume_types.HDD
# Example instance id
INSTANCE_ID = '8705bb38-2574-454f-9967-d18b130bf5ee'
# Get all volumes
- all_volumes = datacrunch.volumes.get()
+ all_volumes = verda.volumes.get()
# Get all attached volumes
- all_attached_volumes = datacrunch.volumes.get(status=datacrunch.constants.volume_status.ATTACHED)
+ all_attached_volumes = verda.volumes.get(status=verda.constants.volume_status.ATTACHED)
# Get volume by id
- random_volume = datacrunch.volumes.get_by_id("0c41e387-3dd8-495f-a285-e861527f2f3d")
+ random_volume = verda.volumes.get_by_id("0c41e387-3dd8-495f-a285-e861527f2f3d")
# Create a 200 GB detached NVMe volume
- nvme_volume = datacrunch.volumes.create(type=NVMe,
+ nvme_volume = verda.volumes.create(type=NVMe,
name="data-storage-1",
size=200)
# Create a 500 GB HDD volume and attach it to an existing shutdown instance
# Note: If the instance isn't shutdown an exception would be raised
- hdd_volume = datacrunch.volumes.create(type=HDD,
+ hdd_volume = verda.volumes.create(type=HDD,
name="data-storage-2",
size=500,
instance_id=INSTANCE_ID)
@@ -45,16 +45,16 @@ Storage Volumes
hdd_volume_id = hdd_volume.id
# attach the nvme volume to the instance
- datacrunch.volumes.attach(nvme_volume_id, INSTANCE_ID)
+ verda.volumes.attach(nvme_volume_id, INSTANCE_ID)
# detach both volumes from the instance
- datacrunch.volumes.detach([nvme_volume_id, hdd_volume_id])
+ verda.volumes.detach([nvme_volume_id, hdd_volume_id])
# rename volume
- datacrunch.volumes.rename(nvme_volume_id, "new-name")
+ verda.volumes.rename(nvme_volume_id, "new-name")
# increase volume size
- datacrunch.volumes.increase_size(nvme_volume_id, 300)
+ verda.volumes.increase_size(nvme_volume_id, 300)
# delete volumes
- datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id])
+ verda.volumes.delete([nvme_volume_id, hdd_volume_id])
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 33ddc67..025fc1f 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -3,59 +3,59 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-DataCrunch Python SDK
-=====================
+Verda Python SDK
+================
-Welcome to the documentation for the official DataCrunch Python SDK.
+Welcome to the documentation for the official Verda (formerly DataCrunch) Python SDK.
-The Public API documentation is `available here <https://api.datacrunch.io/v1/docs>`_
+The Public API documentation is `available here <https://api.verda.com/v1/docs>`_
-The Python SDK is open-sourced and can be `found here <https://github.com/DataCrunch-io/datacrunch-python>`_
+The Python SDK is open-sourced and can be `found here <https://github.com/verda-cloud/sdk-python>`_
Basic Examples:
---------------
-First, get your client credentials - `instructions available here <https://api.datacrunch.io/v1/docs#description/quick-start-guide>`_.
+First, get your client credentials - `instructions available here <https://api.verda.com/v1/docs#description/quick-start-guide>`_.
Deploy a new instance:
.. code-block:: python
import os
- from datacrunch import DataCrunchClient
+ from verda import VerdaClient
# Get client secret from environment variable
- CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
+ CLIENT_SECRET = os.environ['VERDA_CLIENT_SECRET']
CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi' # Replace with your client ID
- # Create datcrunch client
- datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
+ # Create client
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get all SSH keys id's
- ssh_keys = datacrunch.ssh_keys.get()
+ ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = list(map(lambda ssh_key: ssh_key.id, ssh_keys))
# Create a new instance
- instance = datacrunch.instances.create(instance_type='1V100.6V',
- image='ubuntu-24.04-cuda-12.8-open-docker',
- ssh_key_ids=ssh_keys_ids,
- hostname='example',
- description='example instance')
+ instance = verda.instances.create(instance_type='1V100.6V',
+ image='ubuntu-24.04-cuda-12.8-open-docker',
+ ssh_key_ids=ssh_keys_ids,
+ hostname='example',
+ description='example instance')
List all existing instances, ssh keys, startup scripts:
.. code-block:: python
- instances = datacrunch.instances.get()
- keys = datacrunch.ssh_keys.get()
- scripts = datacrunch.startup_scripts.get()
+ instances = verda.instances.get()
+ keys = verda.ssh_keys.get()
+ scripts = verda.startup_scripts.get()
List all available instance & image types (information about available os images and instances to deploy)
.. code-block:: python
- instance_types = datacrunch.instance_types.get()
- images_types = datacrunch.images.get()
+ instance_types = verda.instance_types.get()
+ images_types = verda.images.get()
.. toctree::
:maxdepth: 4
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
index e0d04e6..869c2ef 100644
--- a/docs/source/installation.rst
+++ b/docs/source/installation.rst
@@ -6,14 +6,14 @@ Stable Release
Use `pip (Package manager for python) <https://pip.pypa.io/en/stable/>`_ to install the latest stable release::
- pip3 install datacrunch
+ pip3 install verda
Install from source
-------------------
-To install the package from the source files, first download them from the `GitHub repository <https://github.com/DataCrunch-io/datacrunch-python>`_::
+To install the package from the source files, first download them from the `GitHub repository <https://github.com/verda-cloud/sdk-python>`_::
- git clone https://github.com/DataCrunch-io/datacrunch-python.git
+ git clone https://github.com/verda-cloud/sdk-python.git
Then install it by::
diff --git a/examples/advanced_create_instance.py b/examples/advanced_create_instance.py
index 77f164a..8a0f35a 100644
--- a/examples/advanced_create_instance.py
+++ b/examples/advanced_create_instance.py
@@ -1,7 +1,7 @@
import os
-from datacrunch import DataCrunchClient
-from datacrunch.exceptions import APIException
+from verda import VerdaClient
+from verda.exceptions import APIException
"""
In this hypothetical example, we check if we have enough balance
@@ -9,7 +9,7 @@
If there's not enough balance, we deploy a 4V100.20V instance.
This example uses the balance service to check the current balance,
-the instace_types service to check instance type details (price per hour)
+the instance_types service to check instance type details (price per hour)
We also perform other basic tasks such as creating the client and adding a new SSH key.
"""
@@ -22,29 +22,29 @@
DURATION = 24 * 7 # one week
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
try:
- # Create datcrunch client
- datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+ # Create client
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create new SSH key
public_key = (
'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAI0qq2Qjt5GPi7DKdcnBHOkvk8xNsG9dA607tnWagOkHC test_key'
)
- ssh_key = datacrunch.ssh_keys.create('my test key', public_key)
+ ssh_key = verda.ssh_keys.create('my test key', public_key)
# Get all SSH keys
- ssh_keys = datacrunch.ssh_keys.get()
+ ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = [ssh_key.id for ssh_key in ssh_keys]
# Get our current balance
- balance = datacrunch.balance.get()
+ balance = verda.balance.get()
print(balance.amount)
# Get instance types
- instance_types = datacrunch.instance_types.get()
+ instance_types = verda.instance_types.get()
# Deploy 8V instance if enough balance for a week, otherwise deploy a 4V
for instance_details in instance_types:
@@ -53,7 +53,7 @@
if price_per_hour * DURATION < balance.amount:
# Deploy a new 8V instance
- instance = datacrunch.instances.create(
+ instance = verda.instances.create(
instance_type=INSTANCE_TYPE_8V,
image='ubuntu-22.04-cuda-12.0-docker',
ssh_key_ids=ssh_keys_ids,
@@ -63,7 +63,7 @@
)
else:
# Deploy a new 4V instance
- instance = datacrunch.instances.create(
+ instance = verda.instances.create(
instance_type=INSTANCE_TYPE_4V,
image='ubuntu-22.04-cuda-12.0-docker',
ssh_key_ids=ssh_keys_ids,
diff --git a/examples/containers/calling_the_endpoint_asynchronously.py b/examples/containers/calling_the_endpoint_asynchronously.py
index 28a8bcb..12cb588 100644
--- a/examples/containers/calling_the_endpoint_asynchronously.py
+++ b/examples/containers/calling_the_endpoint_asynchronously.py
@@ -1,26 +1,26 @@
import os
from time import sleep
-from datacrunch import DataCrunchClient
-from datacrunch.InferenceClient.inference_client import AsyncStatus
+from verda import VerdaClient
+from verda.InferenceClient.inference_client import AsyncStatus
# Configuration - replace with your deployment name
-DEPLOYMENT_NAME = os.environ.get('DATACRUNCH_DEPLOYMENT_NAME')
+DEPLOYMENT_NAME = os.environ.get('VERDA_DEPLOYMENT_NAME')
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
-DATACRUNCH_INFERENCE_KEY = os.environ.get('DATACRUNCH_INFERENCE_KEY')
-
-# DataCrunch client instance
-datacrunch = DataCrunchClient(
- DATACRUNCH_CLIENT_ID,
- DATACRUNCH_CLIENT_SECRET,
- inference_key=DATACRUNCH_INFERENCE_KEY,
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
+INFERENCE_KEY = os.environ.get('VERDA_INFERENCE_KEY')
+
+# Verda client instance
+verda = VerdaClient(
+ CLIENT_ID,
+ CLIENT_SECRET,
+ inference_key=INFERENCE_KEY,
)
# Get the deployment
-deployment = datacrunch.containers.get_deployment_by_name(DEPLOYMENT_NAME)
+deployment = verda.containers.get_deployment_by_name(DEPLOYMENT_NAME)
# Make an asynchronous request to the endpoint.
# This example demonstrates calling a SGLang deployment which serves LLMs using an OpenAI-compatible API format
diff --git a/examples/containers/calling_the_endpoint_synchronously.py b/examples/containers/calling_the_endpoint_synchronously.py
index 28443ca..0f8ee6b 100644
--- a/examples/containers/calling_the_endpoint_synchronously.py
+++ b/examples/containers/calling_the_endpoint_synchronously.py
@@ -1,24 +1,24 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Configuration - replace with your deployment name
-DEPLOYMENT_NAME = os.environ.get('DATACRUNCH_DEPLOYMENT_NAME')
+DEPLOYMENT_NAME = os.environ.get('VERDA_DEPLOYMENT_NAME')
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
-DATACRUNCH_INFERENCE_KEY = os.environ.get('DATACRUNCH_INFERENCE_KEY')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
+INFERENCE_KEY = os.environ.get('VERDA_INFERENCE_KEY')
-# DataCrunch client instance
-datacrunch = DataCrunchClient(
- DATACRUNCH_CLIENT_ID,
- DATACRUNCH_CLIENT_SECRET,
- inference_key=DATACRUNCH_INFERENCE_KEY,
+# Verda client instance
+verda = VerdaClient(
+ CLIENT_ID,
+ CLIENT_SECRET,
+ inference_key=INFERENCE_KEY,
)
# Get the deployment
-deployment = datacrunch.containers.get_deployment_by_name(DEPLOYMENT_NAME)
+deployment = verda.containers.get_deployment_by_name(DEPLOYMENT_NAME)
# Make a synchronous request to the endpoint.
# This example demonstrates calling a SGLang deployment which serves LLMs using an OpenAI-compatible API format
diff --git a/examples/containers/calling_the_endpoint_with_inference_key.py b/examples/containers/calling_the_endpoint_with_inference_key.py
index e6bc52f..e34820e 100644
--- a/examples/containers/calling_the_endpoint_with_inference_key.py
+++ b/examples/containers/calling_the_endpoint_with_inference_key.py
@@ -1,15 +1,15 @@
import os
-from datacrunch.InferenceClient import InferenceClient
+from verda.InferenceClient import InferenceClient
# Get inference key and endpoint base url from environment variables
-DATACRUNCH_INFERENCE_KEY = os.environ.get('DATACRUNCH_INFERENCE_KEY')
-DATACRUNCH_ENDPOINT_BASE_URL = os.environ.get('DATACRUNCH_ENDPOINT_BASE_URL')
+INFERENCE_KEY = os.environ.get('VERDA_INFERENCE_KEY')
+BASE_URL = os.environ.get('VERDA_BASE_URL')
# Create an inference client that uses only the inference key, without client credentials
inference_client = InferenceClient(
- inference_key=DATACRUNCH_INFERENCE_KEY,
- endpoint_base_url=DATACRUNCH_ENDPOINT_BASE_URL,
+ inference_key=INFERENCE_KEY,
+ endpoint_base_url=BASE_URL,
)
# Make a synchronous request to the endpoint.
diff --git a/examples/containers/calling_the_endpoint_with_inference_key_async.py b/examples/containers/calling_the_endpoint_with_inference_key_async.py
index c23eca1..d4da15f 100644
--- a/examples/containers/calling_the_endpoint_with_inference_key_async.py
+++ b/examples/containers/calling_the_endpoint_with_inference_key_async.py
@@ -1,17 +1,17 @@
import os
from time import sleep
-from datacrunch.InferenceClient import InferenceClient
-from datacrunch.InferenceClient.inference_client import AsyncStatus
+from verda.InferenceClient import InferenceClient
+from verda.InferenceClient.inference_client import AsyncStatus
# Get inference key and endpoint base url from environment variables
-DATACRUNCH_INFERENCE_KEY = os.environ.get('DATACRUNCH_INFERENCE_KEY')
-DATACRUNCH_ENDPOINT_BASE_URL = os.environ.get('DATACRUNCH_ENDPOINT_BASE_URL')
+INFERENCE_KEY = os.environ.get('VERDA_INFERENCE_KEY')
+BASE_URL = os.environ.get('VERDA_BASE_URL')
# Create an inference client that uses only the inference key, without client credentials
inference_client = InferenceClient(
- inference_key=DATACRUNCH_INFERENCE_KEY,
- endpoint_base_url=DATACRUNCH_ENDPOINT_BASE_URL,
+ inference_key=INFERENCE_KEY,
+ endpoint_base_url=BASE_URL,
)
# Make an asynchronous request to the endpoint
diff --git a/examples/containers/compute_resources_example.py b/examples/containers/compute_resources_example.py
index 219e523..9bedf8c 100644
--- a/examples/containers/compute_resources_example.py
+++ b/examples/containers/compute_resources_example.py
@@ -1,28 +1,28 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
# Initialize the client with your credentials
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Example 1: List all compute resources
print('All compute resources:')
-all_resources = datacrunch.containers.get_compute_resources()
+all_resources = verda.containers.get_compute_resources()
for resource in all_resources:
print(f'Name: {resource.name}, Size: {resource.size}, Available: {resource.is_available}')
# Example 2: List available compute resources
print('\nAvailable compute resources:')
-available_resources = datacrunch.containers.get_compute_resources(is_available=True)
+available_resources = verda.containers.get_compute_resources(is_available=True)
for resource in available_resources:
print(f'Name: {resource.name}, Size: {resource.size}')
# Example 3: List compute resources of size 8
print('\nCompute resources with size 8:')
-size_8_resources = datacrunch.containers.get_compute_resources(size=8)
+size_8_resources = verda.containers.get_compute_resources(size=8)
for resource in size_8_resources:
print(f'Name: {resource.name}, Available: {resource.is_available}')
diff --git a/examples/containers/container_deployments_example.py b/examples/containers/container_deployments_example.py
index e5404b8..088f4bc 100644
--- a/examples/containers/container_deployments_example.py
+++ b/examples/containers/container_deployments_example.py
@@ -1,4 +1,4 @@
-"""Example script demonstrating container deployment management using the DataCrunch API.
+"""Example script demonstrating container deployment management using the Verda API.
This script provides a comprehensive example of container deployment lifecycle,
including creation, monitoring, scaling, and cleanup.
@@ -7,8 +7,8 @@
import os
import time
-from datacrunch import DataCrunchClient
-from datacrunch.containers import (
+from verda import VerdaClient
+from verda.containers import (
ComputeResource,
Container,
ContainerDeploymentStatus,
@@ -26,22 +26,22 @@
SharedFileSystemMount,
UtilizationScalingTrigger,
)
-from datacrunch.exceptions import APIException
+from verda.exceptions import APIException
# Configuration constants
DEPLOYMENT_NAME = 'my-deployment'
IMAGE_NAME = 'your-image-name:version'
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# DataCrunch client instance
-datacrunch = None
+# Verda client instance
+verda = None
def wait_for_deployment_health(
- client: DataCrunchClient,
+ client: VerdaClient,
deployment_name: str,
max_attempts: int = 10,
delay: int = 30,
@@ -49,7 +49,7 @@ def wait_for_deployment_health(
"""Wait for deployment to reach healthy status.
Args:
- client: DataCrunch API client
+ client: Verda API client
deployment_name: Name of the deployment to check
max_attempts: Maximum number of status checks
delay: Delay between checks in seconds
@@ -70,11 +70,11 @@ def wait_for_deployment_health(
return False
-def cleanup_resources(client: DataCrunchClient) -> None:
+def cleanup_resources(client: VerdaClient) -> None:
"""Clean up all created resources.
Args:
- client: DataCrunch API client
+ client: Verda API client
"""
try:
# Delete deployment
@@ -88,8 +88,8 @@ def main() -> None:
"""Main function demonstrating deployment lifecycle management."""
try:
# Initialize client
- global datacrunch
- datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+ global verda
+ verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create container configuration
container = Container(
@@ -153,18 +153,18 @@ def main() -> None:
)
# Create the deployment
- created_deployment = datacrunch.containers.create_deployment(deployment)
+ created_deployment = verda.containers.create_deployment(deployment)
print(f'Created deployment: {created_deployment.name}')
# Wait for deployment to be healthy
- if not wait_for_deployment_health(datacrunch, DEPLOYMENT_NAME):
+ if not wait_for_deployment_health(verda, DEPLOYMENT_NAME):
print('Deployment health check failed')
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
return
# Update scaling configuration
try:
- deployment = datacrunch.containers.get_deployment_by_name(DEPLOYMENT_NAME)
+ deployment = verda.containers.get_deployment_by_name(DEPLOYMENT_NAME)
# Create new scaling options with increased replica counts
deployment.scaling = ScalingOptions(
min_replica_count=2,
@@ -179,9 +179,7 @@ def main() -> None:
gpu_utilization=UtilizationScalingTrigger(enabled=True, threshold=80),
),
)
- updated_deployment = datacrunch.containers.update_deployment(
- DEPLOYMENT_NAME, deployment
- )
+ updated_deployment = verda.containers.update_deployment(DEPLOYMENT_NAME, deployment)
print(f'Updated deployment scaling: {updated_deployment.name}')
except APIException as e:
print(f'Error updating scaling options: {e}')
@@ -189,32 +187,32 @@ def main() -> None:
# Demonstrate deployment operations
try:
# Pause deployment
- datacrunch.containers.pause_deployment(DEPLOYMENT_NAME)
+ verda.containers.pause_deployment(DEPLOYMENT_NAME)
print('Deployment paused')
time.sleep(60)
# Resume deployment
- datacrunch.containers.resume_deployment(DEPLOYMENT_NAME)
+ verda.containers.resume_deployment(DEPLOYMENT_NAME)
print('Deployment resumed')
# Restart deployment
- datacrunch.containers.restart_deployment(DEPLOYMENT_NAME)
+ verda.containers.restart_deployment(DEPLOYMENT_NAME)
print('Deployment restarted')
# Purge queue
- datacrunch.containers.purge_deployment_queue(DEPLOYMENT_NAME)
+ verda.containers.purge_deployment_queue(DEPLOYMENT_NAME)
print('Queue purged')
except APIException as e:
print(f'Error in deployment operations: {e}')
# Clean up
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
except Exception as e:
print(f'Unexpected error: {e}')
# Attempt cleanup even if there was an error
try:
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
except Exception as cleanup_error:
print(f'Error during cleanup after failure: {cleanup_error}')
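One note on the helper above: `wait_for_deployment_health` polls for up to `max_attempts * delay` seconds, so the defaults (10 attempts, 30 s) give a five-minute budget. A sketch of tuning that budget, reusing only names defined in this example:

```python
# 20 attempts x 15 s keeps the same five-minute budget with finer-grained updates.
healthy = wait_for_deployment_health(verda, DEPLOYMENT_NAME, max_attempts=20, delay=15)
if not healthy:
    cleanup_resources(verda)
```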
diff --git a/examples/containers/delete_deployment_example.py b/examples/containers/delete_deployment_example.py
index f135aed..0c3086d 100644
--- a/examples/containers/delete_deployment_example.py
+++ b/examples/containers/delete_deployment_example.py
@@ -1,18 +1,18 @@
-"""Example script demonstrating deleting a deployment using the DataCrunch API."""
+"""Example script demonstrating deleting a deployment using the Verda API."""
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
DEPLOYMENT_NAME = 'sglang-deployment-example-20250411-160652'
# Get confidential values from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
# Initialize the client with your credentials
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Delete the deployment
-datacrunch.containers.delete_deployment(DEPLOYMENT_NAME)
+verda.containers.delete_deployment(DEPLOYMENT_NAME)
print('Deployment deleted')
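Deleting a deployment that no longer exists presumably raises, since the other examples catch `APIException` around the same call. A guarded variant of the one-liner above, as a sketch:

```python
from verda.exceptions import APIException

try:
    verda.containers.delete_deployment(DEPLOYMENT_NAME)
    print('Deployment deleted')
except APIException as e:
    print(f'Delete failed: {e}')
```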
diff --git a/examples/containers/environment_variables_example.py b/examples/containers/environment_variables_example.py
index 69dd5ba..9638bf8 100644
--- a/examples/containers/environment_variables_example.py
+++ b/examples/containers/environment_variables_example.py
@@ -9,15 +9,15 @@
import os
-from datacrunch import DataCrunchClient
-from datacrunch.containers import EnvVar, EnvVarType
+from verda import VerdaClient
+from verda.containers import EnvVar, EnvVarType
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Initialize DataCrunch client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+# Initialize Verda client
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Example deployment and container names
DEPLOYMENT_NAME = 'my-deployment'
@@ -36,12 +36,12 @@ def print_env_vars(env_vars: dict[str, list[EnvVar]]) -> None:
def main():
# First, let's get the current environment variables
print('Getting current environment variables...')
- env_vars = datacrunch.containers.get_deployment_environment_variables(DEPLOYMENT_NAME)
+ env_vars = verda.containers.get_deployment_environment_variables(DEPLOYMENT_NAME)
print_env_vars(env_vars)
# Create a new secret
secret_name = 'my-secret-key'
- datacrunch.containers.create_secret(secret_name, 'my-secret-value')
+ verda.containers.create_secret(secret_name, 'my-secret-value')
# Add new environment variables
print('\nAdding new environment variables...')
@@ -54,7 +54,7 @@ def main():
EnvVar(name='DEBUG', value_or_reference_to_secret='true', type=EnvVarType.PLAIN),
]
- env_vars = datacrunch.containers.add_deployment_environment_variables(
+ env_vars = verda.containers.add_deployment_environment_variables(
deployment_name=DEPLOYMENT_NAME,
container_name=CONTAINER_NAME,
env_vars=new_env_vars,
@@ -67,7 +67,7 @@ def main():
EnvVar(name='DEBUG', value_or_reference_to_secret='false', type=EnvVarType.PLAIN),
]
- env_vars = datacrunch.containers.update_deployment_environment_variables(
+ env_vars = verda.containers.update_deployment_environment_variables(
deployment_name=DEPLOYMENT_NAME,
container_name=CONTAINER_NAME,
env_vars=updated_env_vars,
@@ -76,7 +76,7 @@ def main():
# Delete environment variables
print('\nDeleting environment variables...')
- env_vars = datacrunch.containers.delete_deployment_environment_variables(
+ env_vars = verda.containers.delete_deployment_environment_variables(
deployment_name=DEPLOYMENT_NAME,
container_name=CONTAINER_NAME,
env_var_names=['DEBUG'],
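The example above creates a secret (`my-secret-key`) but only shows `EnvVarType.PLAIN` variables. A sketch of referencing that secret from an env var, assuming `EnvVarType` also exposes a `SECRET` member (not confirmed by this diff):

```python
from verda.containers import EnvVar, EnvVarType

# Assumption: EnvVarType.SECRET exists and makes value_or_reference_to_secret
# point at an existing secret's name instead of a literal value.
secret_env = EnvVar(
    name='API_KEY',
    value_or_reference_to_secret='my-secret-key',
    type=EnvVarType.SECRET,
)
```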
diff --git a/examples/containers/fileset_secret_example.py b/examples/containers/fileset_secret_example.py
index edb760f..b65956b 100644
--- a/examples/containers/fileset_secret_example.py
+++ b/examples/containers/fileset_secret_example.py
@@ -1,16 +1,16 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Fileset secrets are a way to mount sensitive files like API keys, certs, and credentials securely inside a container, without hardcoding them in the image or env vars.
# This example demonstrates how to create a fileset secret containing two files from your local filesystem
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
# Initialize the client with your credentials
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Define the secret name and the file paths from your local filesystem where this script is running
SECRET_NAME = 'my-fileset-secret'
@@ -18,13 +18,13 @@
ABSOLUTE_FILE_PATH = '/home/username/absolute-path/file2.json'
# Create the fileset secret that has 2 files
-fileset_secret = datacrunch.containers.create_fileset_secret_from_file_paths(
+fileset_secret = verda.containers.create_fileset_secret_from_file_paths(
secret_name=SECRET_NAME, file_paths=[RELATIVE_FILE_PATH, ABSOLUTE_FILE_PATH]
)
# Get the secret
-secrets = datacrunch.containers.get_fileset_secrets()
+secrets = verda.containers.get_fileset_secrets()
print(secrets)
# Delete the secret
-datacrunch.containers.delete_fileset_secret(secret_name=SECRET_NAME)
+verda.containers.delete_fileset_secret(secret_name=SECRET_NAME)
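To verify the fileset secret exists before deleting it, a membership check can go between the two calls above; this assumes each returned entry exposes a `.name` attribute like the plain secrets do:

```python
# Assumption: fileset secret objects expose .name, as plain secrets do.
names = [s.name for s in verda.containers.get_fileset_secrets()]
assert SECRET_NAME in names, f'{SECRET_NAME} was not created'
```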
diff --git a/examples/containers/registry_credentials_example.py b/examples/containers/registry_credentials_example.py
index 23179f3..d066aaa 100644
--- a/examples/containers/registry_credentials_example.py
+++ b/examples/containers/registry_credentials_example.py
@@ -1,7 +1,7 @@
import os
-from datacrunch import DataCrunchClient
-from datacrunch.containers import (
+from verda import VerdaClient
+from verda.containers import (
AWSECRCredentials,
CustomRegistryCredentials,
DockerHubCredentials,
@@ -10,11 +10,11 @@
)
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Initialize DataCrunch client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+# Initialize Verda client
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Example 1: DockerHub Credentials
dockerhub_creds = DockerHubCredentials(
@@ -22,7 +22,7 @@
username='your-dockerhub-username',
access_token='your-dockerhub-access-token',
)
-datacrunch.containers.add_registry_credentials(dockerhub_creds)
+verda.containers.add_registry_credentials(dockerhub_creds)
print('Created DockerHub credentials')
# Example 2: GitHub Container Registry Credentials
@@ -31,7 +31,7 @@
username='your-github-username',
access_token='your-github-token',
)
-datacrunch.containers.add_registry_credentials(github_creds)
+verda.containers.add_registry_credentials(github_creds)
print('Created GitHub credentials')
# Example 3: Google Container Registry (GCR) Credentials
@@ -50,7 +50,7 @@
}"""
gcr_creds = GCRCredentials(name='my-gcr-creds', service_account_key=gcr_service_account_key)
-datacrunch.containers.add_registry_credentials(gcr_creds)
+verda.containers.add_registry_credentials(gcr_creds)
print('Created GCR credentials')
# Example 4: AWS ECR Credentials
@@ -61,7 +61,7 @@
region='eu-north-1',
ecr_repo='887841266746.dkr.ecr.eu-north-1.amazonaws.com',
)
-datacrunch.containers.add_registry_credentials(aws_creds)
+verda.containers.add_registry_credentials(aws_creds)
print('Created AWS ECR credentials')
# Example 5: Custom Registry Credentials
@@ -76,12 +76,12 @@
custom_creds = CustomRegistryCredentials(
name='my-custom-registry-creds', docker_config_json=custom_docker_config
)
-datacrunch.containers.add_registry_credentials(custom_creds)
+verda.containers.add_registry_credentials(custom_creds)
print('Created Custom registry credentials')
# Delete all registry credentials
-datacrunch.containers.delete_registry_credentials('my-dockerhub-creds')
-datacrunch.containers.delete_registry_credentials('my-github-creds')
-datacrunch.containers.delete_registry_credentials('my-gcr-creds')
-datacrunch.containers.delete_registry_credentials('my-aws-ecr-creds')
-datacrunch.containers.delete_registry_credentials('my-custom-registry-creds')
+verda.containers.delete_registry_credentials('my-dockerhub-creds')
+verda.containers.delete_registry_credentials('my-github-creds')
+verda.containers.delete_registry_credentials('my-gcr-creds')
+verda.containers.delete_registry_credentials('my-aws-ecr-creds')
+verda.containers.delete_registry_credentials('my-custom-registry-creds')
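The names passed to `delete_registry_credentials` must match the `name=` given at creation. The five delete calls above can be folded into a loop, which keeps the list in one place:

```python
# Equivalent loop form of the five delete calls above.
for name in [
    'my-dockerhub-creds',
    'my-github-creds',
    'my-gcr-creds',
    'my-aws-ecr-creds',
    'my-custom-registry-creds',
]:
    verda.containers.delete_registry_credentials(name)
```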
diff --git a/examples/containers/secrets_example.py b/examples/containers/secrets_example.py
index 0418e7d..a8c2161 100644
--- a/examples/containers/secrets_example.py
+++ b/examples/containers/secrets_example.py
@@ -1,16 +1,16 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Initialize DataCrunch client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+# Initialize Verda client
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# List all secrets
-secrets = datacrunch.containers.get_secrets()
+secrets = verda.containers.get_secrets()
print('Available secrets:')
for secret in secrets:
print(f'- {secret.name} (created at: {secret.created_at})')
@@ -18,15 +18,15 @@
# Create a new secret
secret_name = 'my-api-key'
secret_value = 'super-secret-value'
-datacrunch.containers.create_secret(name=secret_name, value=secret_value)
+verda.containers.create_secret(name=secret_name, value=secret_value)
print(f'\nCreated new secret: {secret_name}')
# Delete a secret (with force=False by default)
-datacrunch.containers.delete_secret(secret_name)
+verda.containers.delete_secret(secret_name)
print(f'\nDeleted secret: {secret_name}')
# Delete a secret with force=True (will delete even if secret is in use)
secret_name = 'another-secret'
-datacrunch.containers.create_secret(name=secret_name, value=secret_value)
-datacrunch.containers.delete_secret(secret_name, force=True)
+verda.containers.create_secret(name=secret_name, value=secret_value)
+verda.containers.delete_secret(secret_name, force=True)
print(f'\nForce deleted secret: {secret_name}')
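Since `force=False` is the default, deleting a secret that a deployment still references is presumably rejected. A guarded sketch that falls back to a forced delete:

```python
from verda.exceptions import APIException

try:
    verda.containers.delete_secret(secret_name)
except APIException as e:
    print(f'Secret may still be in use: {e}')
    verda.containers.delete_secret(secret_name, force=True)
```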
diff --git a/examples/containers/sglang_deployment_example.py b/examples/containers/sglang_deployment_example.py
index 9a12e77..972195e 100644
--- a/examples/containers/sglang_deployment_example.py
+++ b/examples/containers/sglang_deployment_example.py
@@ -1,4 +1,4 @@
-"""Example script demonstrating SGLang model deployment using the DataCrunch API.
+"""Example script demonstrating SGLang model deployment using the Verda API.
This script provides an example of deploying an SGLang server with the deepseek-ai/deepseek-llm-7b-chat model,
including creation, monitoring, testing, and cleanup.
@@ -11,8 +11,8 @@
import time
from datetime import datetime
-from datacrunch import DataCrunchClient
-from datacrunch.containers import (
+from verda import VerdaClient
+from verda.containers import (
ComputeResource,
Container,
ContainerDeploymentStatus,
@@ -27,7 +27,7 @@
ScalingTriggers,
UtilizationScalingTrigger,
)
-from datacrunch.exceptions import APIException
+from verda.exceptions import APIException
CURRENT_TIMESTAMP = datetime.now().strftime('%Y%m%d-%H%M%S').lower() # e.g. 20250403-120000
@@ -38,14 +38,14 @@
HF_SECRET_NAME = 'huggingface-token'
# Get confidential values from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
-DATACRUNCH_INFERENCE_KEY = os.environ.get('DATACRUNCH_INFERENCE_KEY')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
+INFERENCE_KEY = os.environ.get('VERDA_INFERENCE_KEY')
HF_TOKEN = os.environ.get('HF_TOKEN')
def wait_for_deployment_health(
- datacrunch_client: DataCrunchClient,
+ client: VerdaClient,
deployment_name: str,
max_attempts: int = 20,
delay: int = 30,
@@ -53,7 +53,7 @@ def wait_for_deployment_health(
"""Wait for deployment to reach healthy status.
Args:
- client: DataCrunch API client
+ client: Verda API client
deployment_name: Name of the deployment to check
max_attempts: Maximum number of status checks
delay: Delay between checks in seconds
@@ -64,7 +64,7 @@ def wait_for_deployment_health(
print('Waiting for deployment to be healthy (may take several minutes to download model)...')
for attempt in range(max_attempts):
try:
- status = datacrunch_client.containers.get_deployment_status(deployment_name)
+ status = client.containers.get_deployment_status(deployment_name)
print(f'Attempt {attempt + 1}/{max_attempts} - Deployment status: {status}')
if status == ContainerDeploymentStatus.HEALTHY:
return True
@@ -75,15 +75,15 @@ def wait_for_deployment_health(
return False
-def cleanup_resources(datacrunch_client: DataCrunchClient) -> None:
+def cleanup_resources(client: VerdaClient) -> None:
"""Clean up all created resources.
Args:
- client: DataCrunchAPI client
+ client: Verda API client
"""
try:
# Delete deployment
- datacrunch_client.containers.delete_deployment(DEPLOYMENT_NAME)
+ client.containers.delete_deployment(DEPLOYMENT_NAME)
print('Deployment deleted')
except APIException as e:
print(f'Error during cleanup: {e}')
@@ -93,7 +93,7 @@ def graceful_shutdown(signum, _frame) -> None:
"""Handle graceful shutdown on signals."""
print(f'\nSignal {signum} received, cleaning up resources...')
try:
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
except Exception as e:
print(f'Error during cleanup: {e}')
sys.exit(0)
@@ -101,19 +101,17 @@ def graceful_shutdown(signum, _frame) -> None:
try:
# Get the inference API key
- datacrunch_inference_key = DATACRUNCH_INFERENCE_KEY
- if not datacrunch_inference_key:
- datacrunch_inference_key = input(
- 'Enter your Inference API Key from the DataCrunch dashboard: '
- )
+ inference_key = INFERENCE_KEY
+ if not inference_key:
+ inference_key = input('Enter your Inference API Key from the Verda dashboard: ')
else:
print('Using Inference API Key from environment')
# Initialize client with inference key
- datacrunch = DataCrunchClient(
- client_id=DATACRUNCH_CLIENT_ID,
- client_secret=DATACRUNCH_CLIENT_SECRET,
- inference_key=datacrunch_inference_key,
+ verda = VerdaClient(
+ client_id=CLIENT_ID,
+ client_secret=CLIENT_SECRET,
+ inference_key=inference_key,
)
# Register signal handlers for cleanup
@@ -124,14 +122,14 @@ def graceful_shutdown(signum, _frame) -> None:
print(f'Creating secret for Hugging Face token: {HF_SECRET_NAME}')
try:
# Check if secret already exists
- existing_secrets = datacrunch.containers.get_secrets()
+ existing_secrets = verda.containers.get_secrets()
secret_exists = any(secret.name == HF_SECRET_NAME for secret in existing_secrets)
if not secret_exists:
# check if HF_TOKEN is set; if not, prompt the user
if not HF_TOKEN:
HF_TOKEN = input('Enter your Hugging Face token: ')
- datacrunch.containers.create_secret(HF_SECRET_NAME, HF_TOKEN)
+ verda.containers.create_secret(HF_SECRET_NAME, HF_TOKEN)
print(f"Secret '{HF_SECRET_NAME}' created successfully")
else:
print(f"Secret '{HF_SECRET_NAME}' already exists, using existing secret")
@@ -198,14 +196,14 @@ def graceful_shutdown(signum, _frame) -> None:
)
# Create the deployment
- created_deployment = datacrunch.containers.create_deployment(deployment)
+ created_deployment = verda.containers.create_deployment(deployment)
print(f'Created deployment: {created_deployment.name}')
print('This could take several minutes while the model is downloaded and the server starts...')
# Wait for deployment to be healthy
- if not wait_for_deployment_health(datacrunch, DEPLOYMENT_NAME):
+ if not wait_for_deployment_health(verda, DEPLOYMENT_NAME):
print('Deployment health check failed')
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
sys.exit(1)
# Test the deployment with a simple request
@@ -266,17 +264,17 @@ def graceful_shutdown(signum, _frame) -> None:
# Cleanup or keep running based on user input
keep_running = input('\nDo you want to keep the deployment running? (y/n): ')
if keep_running.lower() != 'y':
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
else:
print(f"Deployment {DEPLOYMENT_NAME} is running. Don't forget to delete it when finished.")
- print('You can delete it from the DataCrunch dashboard or by running:')
- print(f"datacrunch.containers.delete('{DEPLOYMENT_NAME}')")
+ print('You can delete it from the Verda dashboard or by running:')
+ print(f"verda.containers.delete('{DEPLOYMENT_NAME}')")
except Exception as e:
print(f'Unexpected error: {e}')
# Attempt cleanup even if there was an error
try:
- cleanup_resources(datacrunch)
+ cleanup_resources(verda)
except Exception as cleanup_error:
print(f'Error during cleanup after failure: {cleanup_error}')
sys.exit(1)
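The "Register signal handlers for cleanup" step is elided by the hunk above. One plausible form, assuming the standard `signal` module and the `graceful_shutdown(signum, _frame)` handler defined earlier in this file:

```python
import signal

# Route Ctrl-C and termination through graceful_shutdown so the
# deployment is deleted even on interrupt.
signal.signal(signal.SIGINT, graceful_shutdown)
signal.signal(signal.SIGTERM, graceful_shutdown)
```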
diff --git a/examples/containers/update_deployment_scaling_example.py b/examples/containers/update_deployment_scaling_example.py
index f45b2e2..979957e 100644
--- a/examples/containers/update_deployment_scaling_example.py
+++ b/examples/containers/update_deployment_scaling_example.py
@@ -1,31 +1,31 @@
"""Example script demonstrating how to update scaling options for a container deployment.
-This script shows how to update scaling configurations for an existing container deployment on DataCrunch.
+This script shows how to update scaling configurations for an existing container deployment on Verda.
"""
import os
-from datacrunch import DataCrunchClient
-from datacrunch.containers import (
+from verda import VerdaClient
+from verda.containers import (
QueueLoadScalingTrigger,
ScalingOptions,
ScalingPolicy,
ScalingTriggers,
UtilizationScalingTrigger,
)
-from datacrunch.exceptions import APIException
+from verda.exceptions import APIException
# Get deployment name, client secret and id from environment variables
-DEPLOYMENT_NAME = os.environ.get('DATACRUNCH_DEPLOYMENT_NAME')
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+DEPLOYMENT_NAME = os.environ.get('VERDA_DEPLOYMENT_NAME')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
# Initialize client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
try:
# Get current scaling options
- scaling_options = datacrunch.containers.get_deployment_scaling_options(DEPLOYMENT_NAME)
+ scaling_options = verda.containers.get_deployment_scaling_options(DEPLOYMENT_NAME)
print('Current scaling configuration:\n')
print(f'Min replicas: {scaling_options.min_replica_count}')
@@ -70,7 +70,7 @@
)
# Update scaling options
- updated_options = datacrunch.containers.update_deployment_scaling_options(
+ updated_options = verda.containers.update_deployment_scaling_options(
DEPLOYMENT_NAME, scaling_options
)
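The same read-modify-write pattern works for smaller tweaks; a sketch assuming `ScalingOptions` attributes are mutable and that `max_replica_count` mirrors the `min_replica_count` attribute printed above:

```python
# Assumption: scaling option attributes can be mutated in place.
scaling_options = verda.containers.get_deployment_scaling_options(DEPLOYMENT_NAME)
scaling_options.min_replica_count = 1
scaling_options.max_replica_count += 1
verda.containers.update_deployment_scaling_options(DEPLOYMENT_NAME, scaling_options)
```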
diff --git a/examples/instance_actions.py b/examples/instance_actions.py
index 708a39f..9a63550 100644
--- a/examples/instance_actions.py
+++ b/examples/instance_actions.py
@@ -1,22 +1,22 @@
import os
import time
-from datacrunch import DataCrunchClient
-from datacrunch.exceptions import APIException
+from verda import VerdaClient
+from verda.exceptions import APIException
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Create datcrunch client
+# Create Verda client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get all SSH keys
-ssh_keys = datacrunch.ssh_keys.get()
+ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = [ssh_key.id for ssh_key in ssh_keys]
# Create a new 1V100.6V instance
-instance = datacrunch.instances.create(
+instance = verda.instances.create(
instance_type='1V100.6V',
image='ubuntu-22.04-cuda-12.0-docker',
ssh_key_ids=ssh_keys_ids,
@@ -29,28 +29,28 @@
# Try to shutdown instance right away,
# encounter an error (because it's still provisioning)
try:
- datacrunch.instances.action(instance.id, datacrunch.constants.instance_actions.SHUTDOWN)
+ verda.instances.action(instance.id, verda.constants.instance_actions.SHUTDOWN)
except APIException as exception:
print(exception) # we were too eager...
# Wait until instance is running (check every 30sec), only then shut it down
-while instance.status != datacrunch.constants.instance_status.RUNNING:
+while instance.status != verda.constants.instance_status.RUNNING:
time.sleep(30)
- instance = datacrunch.instances.get_by_id(instance.id)
+ instance = verda.instances.get_by_id(instance.id)
# Shutdown!
try:
- datacrunch.instances.action(instance.id, datacrunch.constants.instance_actions.SHUTDOWN)
+ verda.instances.action(instance.id, verda.constants.instance_actions.SHUTDOWN)
except APIException as exception:
print(exception) # no exception this time
# Wait until instance is offline (check every 30sec), only then hibernate
-while instance.status != datacrunch.constants.instance_status.OFFLINE:
+while instance.status != verda.constants.instance_status.OFFLINE:
time.sleep(30)
- instance = datacrunch.instances.get_by_id(instance.id)
+ instance = verda.instances.get_by_id(instance.id)
# Hibernate the instance
try:
- datacrunch.instances.action(instance.id, datacrunch.constants.instance_actions.HIBERNATE)
+ verda.instances.action(instance.id, verda.constants.instance_actions.HIBERNATE)
except APIException as exception:
print(exception)
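The two polling loops above are identical apart from the target status; a small helper removes the duplication, using only calls that already appear in this example:

```python
import time


def wait_for_status(client, instance_id, target_status, poll_seconds=30):
    """Poll until the instance reaches target_status."""
    instance = client.instances.get_by_id(instance_id)
    while instance.status != target_status:
        time.sleep(poll_seconds)
        instance = client.instances.get_by_id(instance_id)
    return instance


# e.g.: wait_for_status(verda, instance.id, verda.constants.instance_status.OFFLINE)
```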
diff --git a/examples/instances_and_volumes.py b/examples/instances_and_volumes.py
index 8af410c..a8ec020 100644
--- a/examples/instances_and_volumes.py
+++ b/examples/instances_and_volumes.py
@@ -1,27 +1,27 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Create datcrunch client
+# Create Verda client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get some volume type constants
-NVMe = datacrunch.constants.volume_types.NVMe
-HDD = datacrunch.constants.volume_types.HDD
+NVMe = verda.constants.volume_types.NVMe
+HDD = verda.constants.volume_types.HDD
EXISTING_OS_VOLUME_ID = '81e45bf0-5da2-412b-97d7-c20a7564fca0'
EXAMPLE_VOLUME_ID = '225dde24-ae44-4787-9224-2b9f56f44394'
EXAMPLE_INSTANCE_ID = '1eeabba4-caf7-4b4a-9143-0107034cc7f5'
# Get all SSH keys
-ssh_keys = datacrunch.ssh_keys.get()
+ssh_keys = verda.ssh_keys.get()
# Create instance with extra attached volumes
-instance_with_extra_volumes = datacrunch.instances.create(
+instance_with_extra_volumes = verda.instances.create(
instance_type='1V100.6V',
image='ubuntu-22.04-cuda-12.0-docker',
ssh_key_ids=[ssh_key.id for ssh_key in ssh_keys],
@@ -34,7 +34,7 @@
)
# Create instance with custom OS volume size and name
-instance_with_custom_os_volume = datacrunch.instances.create(
+instance_with_custom_os_volume = verda.instances.create(
instance_type='1V100.6V',
image='ubuntu-22.04-cuda-12.0-docker',
ssh_key_ids=[ssh_key.id for ssh_key in ssh_keys],
@@ -44,7 +44,7 @@
)
# Create instance with existing OS volume as an image
-instance_with_existing_os_volume = datacrunch.instances.create(
+instance_with_existing_os_volume = verda.instances.create(
instance_type='1V100.6V',
image=EXISTING_OS_VOLUME_ID,
ssh_key_ids=[ssh_key.id for ssh_key in ssh_keys],
@@ -53,20 +53,20 @@
)
# Delete instance AND OS volume (the rest of the volumes will be detached)
-datacrunch.instances.action(
- instance_id=EXAMPLE_INSTANCE_ID, action=datacrunch.constants.instance_actions.DELETE
+verda.instances.action(
+ instance_id=EXAMPLE_INSTANCE_ID, action=verda.constants.instance_actions.DELETE
)
# Delete instance WITHOUT deleting the OS volume (will detach all volumes of the instance)
-datacrunch.instances.action(
+verda.instances.action(
instance_id=EXAMPLE_INSTANCE_ID,
- action=datacrunch.constants.instance_actions.DELETE,
+ action=verda.constants.instance_actions.DELETE,
volume_ids=[],
)
# Delete instance and one of its volumes (will delete one volume, detach the rest)
-datacrunch.instances.action(
+verda.instances.action(
instance_id=EXAMPLE_INSTANCE_ID,
- action=datacrunch.constants.instance_actions.DELETE,
+ action=verda.constants.instance_actions.DELETE,
volume_ids=[EXAMPLE_VOLUME_ID],
)
diff --git a/examples/simple_create_instance.py b/examples/simple_create_instance.py
index 576da30..df4b1b0 100644
--- a/examples/simple_create_instance.py
+++ b/examples/simple_create_instance.py
@@ -1,22 +1,22 @@
import os
import time
-from datacrunch import DataCrunchClient
-from datacrunch.constants import InstanceStatus, Locations
+from verda import VerdaClient
+from verda.constants import InstanceStatus, Locations
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Create datcrunch client
+# Create Verda client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get all SSH key IDs
-ssh_keys = datacrunch.ssh_keys.get()
+ssh_keys = verda.ssh_keys.get()
ssh_keys_ids = [ssh_key.id for ssh_key in ssh_keys]
# Create a new instance
-instance = datacrunch.instances.create(
+instance = verda.instances.create(
instance_type='1V100.6V',
image='ubuntu-22.04-cuda-12.0-docker',
location=Locations.FIN_03,
@@ -28,9 +28,9 @@
# Wait for instance to enter running state
while instance.status != InstanceStatus.RUNNING:
time.sleep(0.5)
- instance = datacrunch.instances.get_by_id(instance.id)
+ instance = verda.instances.get_by_id(instance.id)
print(instance)
# Delete instance
-datacrunch.instances.action(instance.id, datacrunch.constants.instance_actions.DELETE)
+verda.instances.action(instance.id, verda.constants.instance_actions.DELETE)
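The wait loop above polls every 0.5 s with no upper bound. A bounded sketch (the 10-minute budget is an arbitrary choice, not an SDK value):

```python
import time

deadline = time.monotonic() + 600  # assumed 10-minute budget
while instance.status != InstanceStatus.RUNNING:
    if time.monotonic() > deadline:
        raise TimeoutError(f'Instance {instance.id} not running after 600 s')
    time.sleep(5)
    instance = verda.instances.get_by_id(instance.id)
```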
diff --git a/examples/ssh_keys.py b/examples/ssh_keys.py
index 5475ec6..7e9428c 100644
--- a/examples/ssh_keys.py
+++ b/examples/ssh_keys.py
@@ -1,19 +1,19 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Create datcrunch client
+# Create Verda client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create new SSH key
public_key = (
'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAI0qq2Qjt5GPi7DKdcnBHOkvk8xNsG9dA607tnWagOkHC test_key'
)
-ssh_key = datacrunch.ssh_keys.create('my test key', public_key)
+ssh_key = verda.ssh_keys.create('my test key', public_key)
# Print new key id, name, public key
print(ssh_key.id)
@@ -21,10 +21,10 @@
print(ssh_key.public_key)
# Get all keys
-all_ssh_keys = datacrunch.ssh_keys.get()
+all_ssh_keys = verda.ssh_keys.get()
# Get single key by id
-some_ssh_key = datacrunch.ssh_keys.get_by_id(ssh_key.id)
+some_ssh_key = verda.ssh_keys.get_by_id(ssh_key.id)
# Delete ssh key by id
-datacrunch.ssh_keys.delete_by_id(ssh_key.id)
+verda.ssh_keys.delete_by_id(ssh_key.id)
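Instead of the hard-coded test key above, the public key can be read from a local file; a sketch assuming the usual `~/.ssh/id_ed25519.pub` location:

```python
from pathlib import Path

# Assumption: an ed25519 key pair exists at the default OpenSSH path.
public_key = (Path.home() / '.ssh' / 'id_ed25519.pub').read_text().strip()
ssh_key = verda.ssh_keys.create('my laptop key', public_key)
```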
diff --git a/examples/startup_scripts.py b/examples/startup_scripts.py
index e08c69f..2a3b390 100644
--- a/examples/startup_scripts.py
+++ b/examples/startup_scripts.py
@@ -1,13 +1,13 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Create datcrunch client
+# Create Verda client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Create new startup script
bash_script = """echo this is a test script for serious cat business
@@ -18,7 +18,7 @@
# download a cat picture
curl https://http.cat/200 --output cat.jpg
"""
-script = datacrunch.startup_scripts.create('catty businness', bash_script)
+script = verda.startup_scripts.create('catty business', bash_script)
# Print new startup script id, name, script code
print(script.id)
@@ -26,10 +26,10 @@
print(script.script)
# Get all startup scripts
-all_scripts = datacrunch.startup_scripts.get()
+all_scripts = verda.startup_scripts.get()
# Get a single startup script by id
-some_script = datacrunch.startup_scripts.get_by_id(script.id)
+some_script = verda.startup_scripts.get_by_id(script.id)
# Delete startup script by id
-datacrunch.startup_scripts.delete_by_id(script.id)
+verda.startup_scripts.delete_by_id(script.id)
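The inline `bash_script` string can just as well come from a file on disk; a sketch with a hypothetical `setup.sh` path:

```python
from pathlib import Path

# setup.sh is a placeholder path, not part of this repository.
bash_script = Path('setup.sh').read_text()
script = verda.startup_scripts.create('setup from file', bash_script)
```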
diff --git a/examples/storage_volumes.py b/examples/storage_volumes.py
index 56072f0..4c501d0 100644
--- a/examples/storage_volumes.py
+++ b/examples/storage_volumes.py
@@ -1,40 +1,40 @@
import os
-from datacrunch import DataCrunchClient
+from verda import VerdaClient
# Get client secret and id from environment variables
-DATACRUNCH_CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
-DATACRUNCH_CLIENT_SECRET = os.environ.get('DATACRUNCH_CLIENT_SECRET')
+CLIENT_ID = os.environ.get('VERDA_CLIENT_ID')
+CLIENT_SECRET = os.environ.get('VERDA_CLIENT_SECRET')
-# Create datcrunch client
+# Create Verda client
-datacrunch = DataCrunchClient(DATACRUNCH_CLIENT_ID, DATACRUNCH_CLIENT_SECRET)
+verda = VerdaClient(CLIENT_ID, CLIENT_SECRET)
# Get some volume type constants
-NVMe = datacrunch.constants.volume_types.NVMe
-HDD = datacrunch.constants.volume_types.HDD
-SFS = datacrunch.constants.volume_types.SFS
+NVMe = verda.constants.volume_types.NVMe
+HDD = verda.constants.volume_types.HDD
+SFS = verda.constants.volume_types.SFS
# Example instance id
INSTANCE_ID = '8705bb38-2574-454f-9967-d18b130bf5ee'
# Get all volumes
-all_volumes = datacrunch.volumes.get()
+all_volumes = verda.volumes.get()
# Get all attached volumes
-all_attached_volumes = datacrunch.volumes.get(status=datacrunch.constants.volume_status.ATTACHED)
+all_attached_volumes = verda.volumes.get(status=verda.constants.volume_status.ATTACHED)
# Get volume by id
-random_volume = datacrunch.volumes.get_by_id('0c41e387-3dd8-495f-a285-e861527f2f3d')
+random_volume = verda.volumes.get_by_id('0c41e387-3dd8-495f-a285-e861527f2f3d')
# Create a 200 GB detached NVMe volume
-nvme_volume = datacrunch.volumes.create(type=NVMe, name='data-storage-1', size=200)
+nvme_volume = verda.volumes.create(type=NVMe, name='data-storage-1', size=200)
# Create a shared filesystem volume
-shared_filesystem_volume = datacrunch.volumes.create(type=SFS, name='shared-filesystem-1', size=50)
+shared_filesystem_volume = verda.volumes.create(type=SFS, name='shared-filesystem-1', size=50)
# Create a 500 GB HDD volume and attach it to an existing shutdown instance
# Note: if the instance isn't shut down, an exception will be raised
-hdd_volume = datacrunch.volumes.create(
+hdd_volume = verda.volumes.create(
type=HDD, name='data-storage-2', size=500, instance_id=INSTANCE_ID
)
@@ -43,31 +43,31 @@
sfs_volume_id = shared_filesystem_volume.id
# attach the nvme volume to the instance
-datacrunch.volumes.attach(nvme_volume_id, INSTANCE_ID)
+verda.volumes.attach(nvme_volume_id, INSTANCE_ID)
# detach both volumes from the instance
-datacrunch.volumes.detach([nvme_volume_id, hdd_volume_id])
+verda.volumes.detach([nvme_volume_id, hdd_volume_id])
# rename volume
-datacrunch.volumes.rename(nvme_volume_id, 'new-name')
+verda.volumes.rename(nvme_volume_id, 'new-name')
# increase volume size
-datacrunch.volumes.increase_size(nvme_volume_id, 300)
+verda.volumes.increase_size(nvme_volume_id, 300)
# clone volume
-datacrunch.volumes.clone(nvme_volume_id)
+verda.volumes.clone(nvme_volume_id)
# clone volume and give it a new name and storage type (from NVMe to HDD)
-datacrunch.volumes.clone(nvme_volume_id, name='my-cloned-volume', type=HDD)
+verda.volumes.clone(nvme_volume_id, name='my-cloned-volume', type=HDD)
# clone multiple volumes at once
-datacrunch.volumes.clone([nvme_volume_id, hdd_volume_id])
+verda.volumes.clone([nvme_volume_id, hdd_volume_id])
# delete volumes (move to trash for 96h, not permanent)
-datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id, sfs_volume_id])
+verda.volumes.delete([nvme_volume_id, hdd_volume_id, sfs_volume_id])
# get all volumes in trash
-volumes_in_trash = datacrunch.volumes.get_in_trash()
+volumes_in_trash = verda.volumes.get_in_trash()
# delete volumes permanently
-datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id, sfs_volume_id], is_permanent=True)
+verda.volumes.delete([nvme_volume_id, hdd_volume_id, sfs_volume_id], is_permanent=True)
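Because a plain delete only moves volumes to the trash for 96 hours, the integration tests below empty the trash explicitly; the same loop works here:

```python
# Permanently delete everything that was soft-deleted above.
for volume in verda.volumes.get_in_trash():
    verda.volumes.delete(volume.id, is_permanent=True)
```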
diff --git a/pyproject.toml b/pyproject.toml
index 5acec48..9e7334a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,11 @@
[project]
-name = "datacrunch"
-version = "1.16.0"
-description = "Official Python SDK for DataCrunch Public API"
+name = "verda"
+version = "1.17.0"
+description = "Official Python SDK for Verda (formerly DataCrunch) Public API"
readme = "README.md"
-requires-python = ">=3.11"
+requires-python = ">=3.10"
-authors = [{ name = "DataCrunch Oy", email = "info@datacrunch.io" }]
+authors = [{ name = "Verda Cloud Oy", email = "info@verda.com" }]
classifiers = [
"Development Status :: 5 - Production/Stable",
@@ -14,6 +14,7 @@ classifiers = [
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
@@ -33,9 +34,9 @@ dev = [
]
[project.urls]
-Homepage = "https://github.com/DataCrunch-io"
+Homepage = "https://github.com/verda-cloud"
Documentation = "https://datacrunch-python.readthedocs.io/"
-Repository = "https://github.com/DataCrunch-io/datacrunch-python"
+Repository = "https://github.com/verda-cloud/sdk-python"
Changelog = "https://datacrunch-python.readthedocs.io/en/latest/changelog.html"
[build-system]
@@ -43,7 +44,7 @@ requires = ["uv_build>=0.9.5,<0.10.0"]
build-backend = "uv_build"
[tool.uv.build-backend]
-module-name = "datacrunch"
+module-name = "verda"
module-root = ""
[tool.ruff]
diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py
index 91490fc..e557734 100644
--- a/tests/integration_tests/conftest.py
+++ b/tests/integration_tests/conftest.py
@@ -3,7 +3,7 @@
import pytest
from dotenv import load_dotenv
-from datacrunch.datacrunch import DataCrunchClient
+from verda.verda import VerdaClient
"""
Make sure the server is running and the account has enough balance before running the tests
@@ -13,10 +13,10 @@
# Load env variables, make sure there's an env file with valid client credentials
load_dotenv()
-CLIENT_SECRET = os.getenv('DATACRUNCH_CLIENT_SECRET')
-CLIENT_ID = os.getenv('DATACRUNCH_CLIENT_ID')
+CLIENT_SECRET = os.getenv('VERDA_CLIENT_SECRET')
+CLIENT_ID = os.getenv('VERDA_CLIENT_ID')
@pytest.fixture
-def datacrunch_client():
- return DataCrunchClient(CLIENT_ID, CLIENT_SECRET, BASE_URL)
+def verda_client():
+ return VerdaClient(CLIENT_ID, CLIENT_SECRET, BASE_URL)
diff --git a/tests/integration_tests/test_instances.py b/tests/integration_tests/test_instances.py
index 4540428..3f50b46 100644
--- a/tests/integration_tests/test_instances.py
+++ b/tests/integration_tests/test_instances.py
@@ -2,8 +2,8 @@
import pytest
-from datacrunch.constants import Locations
-from datacrunch.datacrunch import DataCrunchClient
+from verda.constants import Locations
+from verda.verda import VerdaClient
IN_GITHUB_ACTIONS = os.getenv('GITHUB_ACTIONS') == 'true'
@@ -11,12 +11,12 @@
@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test doesn't work in GitHub Actions.")
@pytest.mark.withoutresponses
class TestInstances:
- def test_create_instance(self, datacrunch_client: DataCrunchClient):
+ def test_create_instance(self, verda_client: VerdaClient):
# get ssh key
- ssh_key = datacrunch_client.ssh_keys.get()[0]
+ ssh_key = verda_client.ssh_keys.get()[0]
# create instance
- instance = datacrunch_client.instances.create(
+ instance = verda_client.instances.create(
hostname='test-instance',
location=Locations.FIN_03,
instance_type='CPU.4V',
@@ -27,12 +27,12 @@ def test_create_instance(self, datacrunch_client: DataCrunchClient):
# assert instance is created
assert instance.id is not None
- assert instance.status == datacrunch_client.constants.instance_status.PROVISIONING
+ assert instance.status == verda_client.constants.instance_status.PROVISIONING
# delete instance
- datacrunch_client.instances.action(instance.id, 'delete')
+ verda_client.instances.action(instance.id, 'delete')
# permanently delete all volumes in trash
- trash = datacrunch_client.volumes.get_in_trash()
+ trash = verda_client.volumes.get_in_trash()
for volume in trash:
- datacrunch_client.volumes.delete(volume.id, is_permanent=True)
+ verda_client.volumes.delete(volume.id, is_permanent=True)
diff --git a/tests/integration_tests/test_locations.py b/tests/integration_tests/test_locations.py
index 41484a0..4b04c4d 100644
--- a/tests/integration_tests/test_locations.py
+++ b/tests/integration_tests/test_locations.py
@@ -2,8 +2,8 @@
import pytest
-from datacrunch.constants import Locations
-from datacrunch.datacrunch import DataCrunchClient
+from verda.constants import Locations
+from verda.verda import VerdaClient
IN_GITHUB_ACTIONS = os.getenv('GITHUB_ACTIONS') == 'true'
@@ -13,22 +13,16 @@
@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test doesn't work in GitHub Actions.")
@pytest.mark.withoutresponses
class TestLocations:
- def test_specific_instance_availability_in_specific_location(
- self, datacrunch_client: DataCrunchClient
- ):
+ def test_specific_instance_availability_in_specific_location(self, verda_client: VerdaClient):
# call the instance availability endpoint, for a specific location
- availability = datacrunch_client.instances.is_available(
- 'CPU.4V', location_code=Locations.FIN_01
- )
+ availability = verda_client.instances.is_available('CPU.4V', location_code=Locations.FIN_01)
assert availability is not None
assert isinstance(availability, bool)
- def test_all_availabilies_in_specific_location(self, datacrunch_client: DataCrunchClient):
+ def test_all_availabilities_in_specific_location(self, verda_client: VerdaClient):
# call the instance availability endpoint, for a specific location
- availabilities = datacrunch_client.instances.get_availabilities(
- location_code=Locations.FIN_01
- )
+ availabilities = verda_client.instances.get_availabilities(location_code=Locations.FIN_01)
assert availabilities is not None
assert isinstance(availabilities, list)
@@ -37,9 +31,9 @@ def test_all_availabilies_in_specific_location(self, datacrunch_client: DataCrun
assert isinstance(availabilities[0]['availabilities'], list)
assert len(availabilities[0]['availabilities']) > 0
- def test_all_availabilites(self, datacrunch_client: DataCrunchClient):
+ def test_all_availabilities(self, verda_client: VerdaClient):
# call the instance availability endpoint, for all locations
- all_availabilities = datacrunch_client.instances.get_availabilities()
+ all_availabilities = verda_client.instances.get_availabilities()
assert all_availabilities is not None
assert isinstance(all_availabilities, list)
@@ -49,9 +43,9 @@ def test_all_availabilites(self, datacrunch_client: DataCrunchClient):
assert isinstance(all_availabilities[0]['availabilities'], list)
assert len(all_availabilities[0]['availabilities']) > 0
- def test_get_all_locations(self, datacrunch_client: DataCrunchClient):
+ def test_get_all_locations(self, verda_client: VerdaClient):
# call the locations endpoint
- locations = datacrunch_client.locations.get()
+ locations = verda_client.locations.get()
assert locations is not None
assert isinstance(locations, list)
diff --git a/tests/integration_tests/test_volumes.py b/tests/integration_tests/test_volumes.py
index 10d4ff1..bc02dab 100644
--- a/tests/integration_tests/test_volumes.py
+++ b/tests/integration_tests/test_volumes.py
@@ -3,8 +3,8 @@
import pytest
-from datacrunch.constants import Locations, VolumeStatus, VolumeTypes
-from datacrunch.datacrunch import DataCrunchClient
+from verda.constants import Locations, VolumeStatus, VolumeTypes
+from verda.verda import VerdaClient
IN_GITHUB_ACTIONS = os.getenv('GITHUB_ACTIONS') == 'true'
@@ -15,72 +15,72 @@
@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test doesn't work in GitHub Actions.")
@pytest.mark.withoutresponses
class TestVolumes:
- def test_get_volumes_from_trash(self, datacrunch_client: DataCrunchClient):
+ def test_get_volumes_from_trash(self, verda_client: VerdaClient):
# create new volume
- volume = datacrunch_client.volumes.create(type=NVMe, name='test_volume', size=100)
+ volume = verda_client.volumes.create(type=NVMe, name='test_volume', size=100)
# delete volume
- datacrunch_client.volumes.delete(volume.id)
+ verda_client.volumes.delete(volume.id)
# get volumes from trash
- volumes = datacrunch_client.volumes.get_in_trash()
+ volumes = verda_client.volumes.get_in_trash()
# assert volume is in trash
assert volume.id in [v.id for v in volumes]
# cleaning: permanently delete the volume
- datacrunch_client.volumes.delete(volume.id, is_permanent=True)
+ verda_client.volumes.delete(volume.id, is_permanent=True)
- def test_permanently_delete_detached_volumes(seld, datacrunch_client):
+ def test_permanently_delete_detached_volumes(self, verda_client):
# create new volume
- volume = datacrunch_client.volumes.create(type=NVMe, name='test_volume', size=100)
+ volume = verda_client.volumes.create(type=NVMe, name='test_volume', size=100)
# permanently delete the detached volume
- datacrunch_client.volumes.delete(volume.id, is_permanent=True)
+ verda_client.volumes.delete(volume.id, is_permanent=True)
# sleep for 2 seconds
time.sleep(2)
# make sure the volume is not in trash
- volumes = datacrunch_client.volumes.get_in_trash()
+ volumes = verda_client.volumes.get_in_trash()
# assert volume is not in trash
assert volume.id not in [v.id for v in volumes]
# get the volume
- volume = datacrunch_client.volumes.get_by_id(volume.id)
+ volume = verda_client.volumes.get_by_id(volume.id)
# assert volume status is deleted
- assert volume.status == datacrunch_client.constants.volume_status.DELETED
+ assert volume.status == verda_client.constants.volume_status.DELETED
- def test_permanently_delete_a_deleted_volume_from_trash(self, datacrunch_client):
+ def test_permanently_delete_a_deleted_volume_from_trash(self, verda_client):
# create new volume
- volume = datacrunch_client.volumes.create(type=NVMe, name='test_volume', size=100)
+ volume = verda_client.volumes.create(type=NVMe, name='test_volume', size=100)
# delete volume
- datacrunch_client.volumes.delete(volume.id)
+ verda_client.volumes.delete(volume.id)
# sleep for 2 seconds
time.sleep(2)
# permanently delete the volume
- datacrunch_client.volumes.delete(volume.id, is_permanent=True)
+ verda_client.volumes.delete(volume.id, is_permanent=True)
# get the volume
- volume = datacrunch_client.volumes.get_by_id(volume.id)
+ volume = verda_client.volumes.get_by_id(volume.id)
# assert volume status is deleted
- assert volume.status == datacrunch_client.constants.volume_status.DELETED
+ assert volume.status == verda_client.constants.volume_status.DELETED
# make sure the volume is not in trash
- volumes = datacrunch_client.volumes.get_in_trash()
+ volumes = verda_client.volumes.get_in_trash()
# assert volume is not in trash
assert volume.id not in [v.id for v in volumes]
- def test_create_volume(self, datacrunch_client):
+ def test_create_volume(self, verda_client):
# create new volume
- volume = datacrunch_client.volumes.create(
+ volume = verda_client.volumes.create(
type=NVMe, name='test_volume', size=100, location=Locations.FIN_01
)
@@ -90,4 +90,4 @@ def test_create_volume(self, datacrunch_client):
assert volume.status == VolumeStatus.ORDERED or volume.status == VolumeStatus.DETACHED
# cleaning: delete volume
- datacrunch_client.volumes.delete(volume.id, is_permanent=True)
+ verda_client.volumes.delete(volume.id, is_permanent=True)
diff --git a/tests/smoke.py b/tests/smoke.py
new file mode 100644
index 0000000..d06e060
--- /dev/null
+++ b/tests/smoke.py
@@ -0,0 +1,28 @@
+import responses
+
+from verda import VerdaClient
+
+BASE_URL = 'https://example.com'
+
+
+@responses.activate()
+def main():
+ responses.add(
+ responses.POST,
+ f'{BASE_URL}/oauth2/token',
+ json={
+ 'access_token': 'dummy',
+ 'token_type': 'Bearer',
+ 'refresh_token': 'dummy',
+ 'scope': 'fullAccess',
+ 'expires_in': 3600,
+ },
+ status=200,
+ )
+
+ client = VerdaClient('id', 'secret', BASE_URL)
+ assert client.constants.base_url == BASE_URL
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tests/unit_tests/authentication/test_authentication.py b/tests/unit_tests/authentication/test_authentication.py
index 589851e..179e171 100644
--- a/tests/unit_tests/authentication/test_authentication.py
+++ b/tests/unit_tests/authentication/test_authentication.py
@@ -4,13 +4,13 @@
import responses # https://github.com/getsentry/responses
from responses import matchers
-from datacrunch.authentication.authentication import AuthenticationService
-from datacrunch.exceptions import APIException
+from verda.authentication.authentication import AuthenticationService
+from verda.exceptions import APIException
INVALID_REQUEST = 'invalid_request'
INVALID_REQUEST_MESSAGE = 'Your existence is invalid'
-BASE_URL = 'https://api-testing.datacrunch.io/v1'
+BASE_URL = 'https://api.example.com/v1'
CLIENT_ID = '0123456789xyz'
CLIENT_SECRET = 'zyx987654321'
diff --git a/tests/unit_tests/balance/test_balance.py b/tests/unit_tests/balance/test_balance.py
index b1e15ce..4155edb 100644
--- a/tests/unit_tests/balance/test_balance.py
+++ b/tests/unit_tests/balance/test_balance.py
@@ -1,6 +1,6 @@
import responses # https://github.com/getsentry/responses
-from datacrunch.balance.balance import Balance, BalanceService
+from verda.balance.balance import Balance, BalanceService
def test_balance(http_client):
diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py
index 1df8806..3848b7a 100644
--- a/tests/unit_tests/conftest.py
+++ b/tests/unit_tests/conftest.py
@@ -2,9 +2,9 @@
import pytest
-from datacrunch.http_client.http_client import HTTPClient
+from verda.http_client.http_client import HTTPClient
-BASE_URL = 'https://api-testing.datacrunch.io/v1'
+BASE_URL = 'https://api.example.com/v1'
ACCESS_TOKEN = 'test-token'
CLIENT_ID = '0123456789xyz'
CLIENT_SECRET = '0123456789xyz'
diff --git a/tests/unit_tests/containers/test_containers.py b/tests/unit_tests/containers/test_containers.py
index 965f4c9..b2ffc66 100644
--- a/tests/unit_tests/containers/test_containers.py
+++ b/tests/unit_tests/containers/test_containers.py
@@ -2,7 +2,7 @@
import responses # https://github.com/getsentry/responses
from responses import matchers
-from datacrunch.containers.containers import (
+from verda.containers.containers import (
CONTAINER_DEPLOYMENTS_ENDPOINT,
CONTAINER_REGISTRY_CREDENTIALS_ENDPOINT,
SECRETS_ENDPOINT,
@@ -33,7 +33,7 @@
VolumeMount,
VolumeMountType,
)
-from datacrunch.exceptions import APIException
+from verda.exceptions import APIException
DEPLOYMENT_NAME = 'test-deployment'
CONTAINER_NAME = 'test-container'
diff --git a/tests/unit_tests/http_client/test_http_client.py b/tests/unit_tests/http_client/test_http_client.py
index 896dee1..54ccaed 100644
--- a/tests/unit_tests/http_client/test_http_client.py
+++ b/tests/unit_tests/http_client/test_http_client.py
@@ -3,7 +3,7 @@
import pytest
import responses # https://github.com/getsentry/responses
-from datacrunch.exceptions import APIException
+from verda.exceptions import APIException
INVALID_REQUEST = 'invalid_request'
INVALID_REQUEST_MESSAGE = 'Your existence is invalid'
diff --git a/tests/unit_tests/images/test_images.py b/tests/unit_tests/images/test_images.py
index d458167..e067699 100644
--- a/tests/unit_tests/images/test_images.py
+++ b/tests/unit_tests/images/test_images.py
@@ -1,6 +1,6 @@
import responses # https://github.com/getsentry/responses
-from datacrunch.images.images import Image, ImagesService
+from verda.images.images import Image, ImagesService
def test_images(http_client):
diff --git a/tests/unit_tests/instance_types/test_instance_types.py b/tests/unit_tests/instance_types/test_instance_types.py
index 5426d0b..974f26e 100644
--- a/tests/unit_tests/instance_types/test_instance_types.py
+++ b/tests/unit_tests/instance_types/test_instance_types.py
@@ -1,6 +1,6 @@
import responses # https://github.com/getsentry/responses
-from datacrunch.instance_types.instance_types import InstanceType, InstanceTypesService
+from verda.instance_types.instance_types import InstanceType, InstanceTypesService
TYPE_ID = '01cf5dc1-a5d2-4972-ae4e-d429115d055b'
CPU_DESCRIPTION = '48 CPU 3.5GHz'
diff --git a/tests/unit_tests/instances/test_instances.py b/tests/unit_tests/instances/test_instances.py
index 0a7f856..c0f1157 100644
--- a/tests/unit_tests/instances/test_instances.py
+++ b/tests/unit_tests/instances/test_instances.py
@@ -1,9 +1,9 @@
import pytest
import responses # https://github.com/getsentry/responses
-from datacrunch.constants import Actions, ErrorCodes, Locations
-from datacrunch.exceptions import APIException
-from datacrunch.instances.instances import Instance, InstancesService
+from verda.constants import Actions, ErrorCodes, Locations
+from verda.exceptions import APIException
+from verda.instances.instances import Instance, InstancesService
INVALID_REQUEST = ErrorCodes.INVALID_REQUEST
INVALID_REQUEST_MESSAGE = 'Your existence is invalid'
diff --git a/tests/unit_tests/ssh_keys/test_ssh_keys.py b/tests/unit_tests/ssh_keys/test_ssh_keys.py
index 8621823..3d572e5 100644
--- a/tests/unit_tests/ssh_keys/test_ssh_keys.py
+++ b/tests/unit_tests/ssh_keys/test_ssh_keys.py
@@ -1,8 +1,8 @@
import pytest
import responses # https://github.com/getsentry/responses
-from datacrunch.exceptions import APIException
-from datacrunch.ssh_keys.ssh_keys import SSHKey, SSHKeysService
+from verda.exceptions import APIException
+from verda.ssh_keys.ssh_keys import SSHKey, SSHKeysService
INVALID_REQUEST = 'invalid_request'
INVALID_REQUEST_MESSAGE = 'Your existence is invalid'
diff --git a/tests/unit_tests/startup_scripts/test_startup_scripts.py b/tests/unit_tests/startup_scripts/test_startup_scripts.py
index 242f0a8..72426c4 100644
--- a/tests/unit_tests/startup_scripts/test_startup_scripts.py
+++ b/tests/unit_tests/startup_scripts/test_startup_scripts.py
@@ -1,8 +1,8 @@
import pytest
import responses # https://github.com/getsentry/responses
-from datacrunch.exceptions import APIException
-from datacrunch.startup_scripts.startup_scripts import (
+from verda.exceptions import APIException
+from verda.startup_scripts.startup_scripts import (
StartupScript,
StartupScriptsService,
)
diff --git a/tests/unit_tests/test_client.py b/tests/unit_tests/test_client.py
new file mode 100644
index 0000000..845c674
--- /dev/null
+++ b/tests/unit_tests/test_client.py
@@ -0,0 +1,63 @@
+import pytest
+import responses # https://github.com/getsentry/responses
+
+from verda.exceptions import APIException
+from verda.verda import VerdaClient
+
+BASE_URL = 'https://api.example.com/v1'
+
+response_json = {
+ 'access_token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJoZXkiOiJ5b3UgYWN1YWxseSBjaGVja2VkIHRoaXM_In0.0RjcdKQ1NJP9gbRyXITE6LFFLwKGzeeshuubnkkfkb8',
+ 'token_type': 'Bearer',
+ 'expires_in': 3600,
+ 'refresh_token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ3b3ciOiJhbmQgdGhpcyB0b28_In0.AC5gk-o-MOptUgrouEErlhr8WT3Hg_RR6px6A0I7ZEk',
+ 'scope': 'fullAccess',
+}
+
+
+class TestVerdaClient:
+ def test_client(self):
+ # arrange - add response mock
+ responses.add(responses.POST, BASE_URL + '/oauth2/token', json=response_json, status=200)
+
+ # act
+ client = VerdaClient('XXXXXXXXXXXXXX', 'XXXXXXXXXXXXXX', BASE_URL)
+
+ # assert
+ assert client.constants.base_url == BASE_URL
+
+ def test_client_with_default_base_url(self):
+ # arrange - add response mock
+ DEFAULT_BASE_URL = 'https://api.verda.com/v1'
+ responses.add(
+ responses.POST,
+ DEFAULT_BASE_URL + '/oauth2/token',
+ json=response_json,
+ status=200,
+ )
+
+ # act
+ client = VerdaClient('XXXXXXXXXXXXXX', 'XXXXXXXXXXXXXX')
+
+ # assert
+ assert client.constants.base_url == DEFAULT_BASE_URL
+
+ def test_invalid_client_credentials(self):
+ # arrange - add response mock
+ responses.add(
+ responses.POST,
+ BASE_URL + '/oauth2/token',
+ json={
+ 'code': 'unauthorized_request',
+ 'message': 'Invalid client id or client secret',
+ },
+ status=401,
+ )
+
+ # act
+ with pytest.raises(APIException) as excinfo:
+ VerdaClient('x', 'y', BASE_URL)
+
+ # assert
+ assert excinfo.value.code == 'unauthorized_request'
+ assert excinfo.value.message == 'Invalid client id or client secret'
diff --git a/tests/unit_tests/test_datacrunch.py b/tests/unit_tests/test_datacrunch.py
index 40a1f16..9d13464 100644
--- a/tests/unit_tests/test_datacrunch.py
+++ b/tests/unit_tests/test_datacrunch.py
@@ -1,63 +1,55 @@
+import sys
+
import pytest
import responses # https://github.com/getsentry/responses
-from datacrunch.datacrunch import DataCrunchClient
-from datacrunch.exceptions import APIException
-
-BASE_URL = 'https://api-testing.datacrunch.io/v1'
+BASE_URL = 'https://api.example.com/v1'
response_json = {
- 'access_token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJoZXkiOiJ5b3UgYWN1YWxseSBjaGVja2VkIHRoaXM_In0.0RjcdKQ1NJP9gbRyXITE6LFFLwKGzeeshuubnkkfkb8',
+ 'access_token': 'SECRET',
'token_type': 'Bearer',
'expires_in': 3600,
- 'refresh_token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ3b3ciOiJhbmQgdGhpcyB0b28_In0.AC5gk-o-MOptUgrouEErlhr8WT3Hg_RR6px6A0I7ZEk',
+ 'refresh_token': 'SECRET',
'scope': 'fullAccess',
}
-class TestDataCrunchClient:
- def test_client(self):
- # arrange - add response mock
- responses.add(responses.POST, BASE_URL + '/oauth2/token', json=response_json, status=200)
+@pytest.fixture(autouse=True)
+def reset_verda_datacrunch():
+ # Ensure the module is freshly imported in each test: Python caches imports
+ # in sys.modules, so the module-level DeprecationWarning would otherwise fire
+ # only on the first import.
+ sys.modules.pop('verda.datacrunch', None)
+
+
+def test_datacrunch_client_deprecation():
+ from verda import DataCrunchClient
- # act
+ responses.add(responses.POST, BASE_URL + '/oauth2/token', json=response_json, status=200)
+
+ with pytest.warns(DeprecationWarning, match='DataCrunchClient is deprecated'):
client = DataCrunchClient('XXXXXXXXXXXXXX', 'XXXXXXXXXXXXXX', BASE_URL)
- # assert
- assert client.constants.base_url == BASE_URL
-
- def test_client_with_default_base_url(self):
- # arrange - add response mock
- DEFAULT_BASE_URL = 'https://api.datacrunch.io/v1'
- responses.add(
- responses.POST,
- DEFAULT_BASE_URL + '/oauth2/token',
- json=response_json,
- status=200,
- )
-
- # act
- client = DataCrunchClient('XXXXXXXXXXXXXX', 'XXXXXXXXXXXXXX')
-
- # assert
- assert client.constants.base_url == DEFAULT_BASE_URL
-
- def test_invalid_client_credentials(self):
- # arrange - add response mock
- responses.add(
- responses.POST,
- BASE_URL + '/oauth2/token',
- json={
- 'code': 'unauthorized_request',
- 'message': 'Invalid client id or client secret',
- },
- status=401,
- )
-
- # act
- with pytest.raises(APIException) as excinfo:
- DataCrunchClient('x', 'y', BASE_URL)
-
- # assert
- assert excinfo.value.code == 'unauthorized_request'
- assert excinfo.value.message == 'Invalid client id or client secret'
+ assert client.constants.base_url == BASE_URL
+
+
+@pytest.mark.filterwarnings('ignore:DataCrunchClient is deprecated')
+def test_datacrunch_module_deprecation():
+ responses.add(responses.POST, BASE_URL + '/oauth2/token', json=response_json, status=200)
+
+ with pytest.warns(DeprecationWarning, match='datacrunch.datacrunch is deprecated'):
+ from verda.datacrunch import DataCrunchClient
+
+ client = DataCrunchClient('XXXXXXXXXXXXXX', 'XXXXXXXXXXXXXX', BASE_URL)
+ assert client.constants.base_url == BASE_URL
+
+
+def test_datacrunch_constants_module():
+ # Test that the old re-exports in the verda.datacrunch (sub)module still work, but warn
+
+ with pytest.warns(DeprecationWarning, match='datacrunch.datacrunch is deprecated'):
+ from verda.datacrunch import Constants
+
+ constants = Constants('url', 'v1')
+
+ assert constants.base_url == 'url'
+ assert constants.version == 'v1'
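+
+
+# Editorial note (assumption, not part of this diff): the same pattern applies
+# to the other names re-exported by verda.datacrunch, for example:
+#
+#   with pytest.warns(DeprecationWarning, match='datacrunch.datacrunch is deprecated'):
+#       from verda.datacrunch import VolumesService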
diff --git a/tests/unit_tests/test_exceptions.py b/tests/unit_tests/test_exceptions.py
index 940234a..dd58e8e 100644
--- a/tests/unit_tests/test_exceptions.py
+++ b/tests/unit_tests/test_exceptions.py
@@ -1,6 +1,6 @@
import pytest
-from datacrunch.exceptions import APIException
+from verda.exceptions import APIException
ERROR_CODE = 'test_code'
ERROR_MESSAGE = 'test message'
diff --git a/tests/unit_tests/volume_types/test_volume_types.py b/tests/unit_tests/volume_types/test_volume_types.py
index ddde59f..df8ce09 100644
--- a/tests/unit_tests/volume_types/test_volume_types.py
+++ b/tests/unit_tests/volume_types/test_volume_types.py
@@ -1,7 +1,7 @@
import responses # https://github.com/getsentry/responses
-from datacrunch.constants import VolumeTypes
-from datacrunch.volume_types.volume_types import VolumeType, VolumeTypesService
+from verda.constants import VolumeTypes
+from verda.volume_types.volume_types import VolumeType, VolumeTypesService
USD = 'usd'
NVMe_PRICE = 0.2
diff --git a/tests/unit_tests/volumes/test_volumes.py b/tests/unit_tests/volumes/test_volumes.py
index 43c34a7..9a4b592 100644
--- a/tests/unit_tests/volumes/test_volumes.py
+++ b/tests/unit_tests/volumes/test_volumes.py
@@ -2,15 +2,15 @@
import responses # https://github.com/getsentry/responses
from responses import matchers
-from datacrunch.constants import (
+from verda.constants import (
ErrorCodes,
Locations,
VolumeActions,
VolumeStatus,
VolumeTypes,
)
-from datacrunch.exceptions import APIException
-from datacrunch.volumes.volumes import Volume, VolumesService
+from verda.exceptions import APIException
+from verda.volumes.volumes import Volume, VolumesService
INVALID_REQUEST = ErrorCodes.INVALID_REQUEST
INVALID_REQUEST_MESSAGE = 'Your existence is invalid'
diff --git a/uv.lock b/uv.lock
index a81baae..24e8db0 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,6 +1,6 @@
version = 1
revision = 3
-requires-python = ">=3.11"
+requires-python = ">=3.10"
[[package]]
name = "certifi"
@@ -17,6 +17,21 @@ version = "3.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620, upload-time = "2024-10-09T07:40:20.413Z" }
wheels = [
+ { url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363, upload-time = "2024-10-09T07:38:02.622Z" },
+ { url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639, upload-time = "2024-10-09T07:38:04.044Z" },
+ { url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451, upload-time = "2024-10-09T07:38:04.997Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041, upload-time = "2024-10-09T07:38:06.676Z" },
+ { url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333, upload-time = "2024-10-09T07:38:08.626Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921, upload-time = "2024-10-09T07:38:10.301Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785, upload-time = "2024-10-09T07:38:12.019Z" },
+ { url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631, upload-time = "2024-10-09T07:38:13.701Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867, upload-time = "2024-10-09T07:38:15.403Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273, upload-time = "2024-10-09T07:38:16.433Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437, upload-time = "2024-10-09T07:38:18.013Z" },
+ { url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087, upload-time = "2024-10-09T07:38:19.089Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142, upload-time = "2024-10-09T07:38:20.78Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701, upload-time = "2024-10-09T07:38:21.851Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191, upload-time = "2024-10-09T07:38:23.467Z" },
{ url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339, upload-time = "2024-10-09T07:38:24.527Z" },
{ url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366, upload-time = "2024-10-09T07:38:26.488Z" },
{ url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874, upload-time = "2024-10-09T07:38:28.115Z" },
@@ -80,6 +95,16 @@ version = "7.6.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791, upload-time = "2024-08-04T19:45:30.9Z" }
wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690, upload-time = "2024-08-04T19:43:07.695Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127, upload-time = "2024-08-04T19:43:10.15Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654, upload-time = "2024-08-04T19:43:12.405Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598, upload-time = "2024-08-04T19:43:14.078Z" },
+ { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732, upload-time = "2024-08-04T19:43:16.632Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816, upload-time = "2024-08-04T19:43:19.049Z" },
+ { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325, upload-time = "2024-08-04T19:43:21.246Z" },
+ { url = "https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418, upload-time = "2024-08-04T19:43:22.945Z" },
+ { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343, upload-time = "2024-08-04T19:43:25.121Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 210136, upload-time = "2024-08-04T19:43:26.851Z" },
{ url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796, upload-time = "2024-08-04T19:43:29.115Z" },
{ url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244, upload-time = "2024-08-04T19:43:31.285Z" },
{ url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279, upload-time = "2024-08-04T19:43:33.581Z" },
@@ -120,6 +145,7 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598, upload-time = "2024-08-04T19:44:41.59Z" },
{ url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307, upload-time = "2024-08-04T19:44:43.301Z" },
{ url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453, upload-time = "2024-08-04T19:44:45.677Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926, upload-time = "2024-08-04T19:45:28.875Z" },
]
[[package]]
@@ -136,38 +162,15 @@ wheels = [
]
[[package]]
-name = "datacrunch"
-version = "1.16.0"
-source = { editable = "." }
+name = "exceptiongroup"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "dataclasses-json" },
- { name = "requests" },
-]
-
-[package.dev-dependencies]
-dev = [
- { name = "pytest" },
- { name = "pytest-cov" },
- { name = "pytest-responses" },
- { name = "python-dotenv" },
- { name = "responses" },
- { name = "ruff" },
-]
-
-[package.metadata]
-requires-dist = [
- { name = "dataclasses-json", specifier = ">=0.6.7" },
- { name = "requests", specifier = ">=2.25.1,<3" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
-
-[package.metadata.requires-dev]
-dev = [
- { name = "pytest", specifier = ">=8.1,<9" },
- { name = "pytest-cov", specifier = ">=2.10.1,<3" },
- { name = "pytest-responses", specifier = ">=0.4.0,<1" },
- { name = "python-dotenv", specifier = ">=1.1.1" },
- { name = "responses", specifier = ">=0.12.1,<1" },
- { name = "ruff", specifier = ">=0.14.2" },
+sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
]
[[package]]
@@ -242,10 +245,12 @@ version = "8.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
+ { name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
wheels = [
@@ -293,6 +298,15 @@ version = "6.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
wheels = [
+ { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" },
+ { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" },
+ { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" },
{ url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" },
{ url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" },
{ url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" },
@@ -386,6 +400,55 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" },
]
+[[package]]
+name = "tomli"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" },
+ { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" },
+ { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" },
+ { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" },
+ { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" },
+ { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" },
+ { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" },
+ { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" },
+ { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" },
+ { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" },
+ { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" },
+ { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" },
+ { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" },
+ { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" },
+ { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" },
+ { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" },
+ { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" },
+ { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" },
+ { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" },
+ { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" },
+ { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" },
+ { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" },
+ { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" },
+ { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" },
+ { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" },
+ { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
+]
+
[[package]]
name = "typing-extensions"
version = "4.15.0"
@@ -416,3 +479,38 @@ sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b763
wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338, upload-time = "2024-09-12T10:52:16.589Z" },
]
+
+[[package]]
+name = "verda"
+version = "1.17.0"
+source = { editable = "." }
+dependencies = [
+ { name = "dataclasses-json" },
+ { name = "requests" },
+]
+
+[package.dev-dependencies]
+dev = [
+ { name = "pytest" },
+ { name = "pytest-cov" },
+ { name = "pytest-responses" },
+ { name = "python-dotenv" },
+ { name = "responses" },
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "dataclasses-json", specifier = ">=0.6.7" },
+ { name = "requests", specifier = ">=2.25.1,<3" },
+]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "pytest", specifier = ">=8.1,<9" },
+ { name = "pytest-cov", specifier = ">=2.10.1,<3" },
+ { name = "pytest-responses", specifier = ">=0.4.0,<1" },
+ { name = "python-dotenv", specifier = ">=1.1.1" },
+ { name = "responses", specifier = ">=0.12.1,<1" },
+ { name = "ruff", specifier = ">=0.14.2" },
+]
diff --git a/datacrunch/InferenceClient/__init__.py b/verda/InferenceClient/__init__.py
similarity index 100%
rename from datacrunch/InferenceClient/__init__.py
rename to verda/InferenceClient/__init__.py
diff --git a/datacrunch/InferenceClient/inference_client.py b/verda/InferenceClient/inference_client.py
similarity index 100%
rename from datacrunch/InferenceClient/inference_client.py
rename to verda/InferenceClient/inference_client.py
diff --git a/verda/__init__.py b/verda/__init__.py
new file mode 100644
index 0000000..d666f6f
--- /dev/null
+++ b/verda/__init__.py
@@ -0,0 +1,22 @@
+import warnings
+
+from verda._version import __version__
+from verda.verda import VerdaClient
+
+
+class _DataCrunchClientAlias:
+ def __call__(self, *args, **kwargs):
+ warnings.warn(
+ 'DataCrunchClient is deprecated; use VerdaClient instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return VerdaClient(*args, **kwargs)
+
+
+# creates a callable object that behaves like the original class when called
+DataCrunchClient = _DataCrunchClientAlias()
+DataCrunchClient.__name__ = 'DataCrunchClient'
+DataCrunchClient.__doc__ = VerdaClient.__doc__
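+
+# Illustrative usage (editorial sketch, not part of this diff): calling the
+# alias emits a DeprecationWarning and returns a plain VerdaClient, so
+# isinstance(client, VerdaClient) holds; isinstance checks against
+# DataCrunchClient itself no longer work, since the alias is an instance
+# rather than a class.
+#
+#   client = DataCrunchClient('client-id', 'client-secret')  # warns
+#   assert isinstance(client, VerdaClient)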
+
+__all__ = ['DataCrunchClient', 'VerdaClient', '__version__']
diff --git a/datacrunch/_version.py b/verda/_version.py
similarity index 75%
rename from datacrunch/_version.py
rename to verda/_version.py
index b4b0c6b..2aa7245 100644
--- a/datacrunch/_version.py
+++ b/verda/_version.py
@@ -1,6 +1,6 @@
try:
from importlib.metadata import version
- __version__ = version('datacrunch')
+ __version__ = version('verda')
except Exception:
__version__ = '0.0.0+dev' # fallback for development
diff --git a/datacrunch/authentication/__init__.py b/verda/authentication/__init__.py
similarity index 100%
rename from datacrunch/authentication/__init__.py
rename to verda/authentication/__init__.py
diff --git a/datacrunch/authentication/authentication.py b/verda/authentication/authentication.py
similarity index 98%
rename from datacrunch/authentication/authentication.py
rename to verda/authentication/authentication.py
index d8cefe1..c174de4 100644
--- a/datacrunch/authentication/authentication.py
+++ b/verda/authentication/authentication.py
@@ -2,7 +2,7 @@
import requests
-from datacrunch.http_client.http_client import handle_error
+from verda.http_client.http_client import handle_error
TOKEN_ENDPOINT = '/oauth2/token'
diff --git a/datacrunch/balance/__init__.py b/verda/balance/__init__.py
similarity index 100%
rename from datacrunch/balance/__init__.py
rename to verda/balance/__init__.py
diff --git a/datacrunch/balance/balance.py b/verda/balance/balance.py
similarity index 100%
rename from datacrunch/balance/balance.py
rename to verda/balance/balance.py
diff --git a/datacrunch/constants.py b/verda/constants.py
similarity index 98%
rename from datacrunch/constants.py
rename to verda/constants.py
index 3ba0a92..70b789f 100644
--- a/datacrunch/constants.py
+++ b/verda/constants.py
@@ -120,7 +120,7 @@ def __init__(self, base_url, version):
"""Available error codes"""
self.base_url: str = base_url
- """DataCrunch's Public API URL"""
+ """Verda Public API URL"""
self.version: str = version
"""Current SDK Version"""
diff --git a/datacrunch/containers/__init__.py b/verda/containers/__init__.py
similarity index 100%
rename from datacrunch/containers/__init__.py
rename to verda/containers/__init__.py
diff --git a/datacrunch/containers/containers.py b/verda/containers/containers.py
similarity index 98%
rename from datacrunch/containers/containers.py
rename to verda/containers/containers.py
index 6254848..ac5bd0c 100644
--- a/datacrunch/containers/containers.py
+++ b/verda/containers/containers.py
@@ -1,4 +1,4 @@
-"""Container deployment and management service for DataCrunch.
+"""Container deployment and management service for Verda.
This module provides functionality for managing container deployments, including
creation, updates, deletion, and monitoring of containerized applications.
@@ -12,8 +12,8 @@
from dataclasses_json import Undefined, dataclass_json # type: ignore
-from datacrunch.http_client.http_client import HTTPClient
-from datacrunch.InferenceClient import InferenceClient, InferenceResponse
+from verda.http_client.http_client import HTTPClient
+from verda.InferenceClient import InferenceClient, InferenceResponse
# API endpoints
CONTAINER_DEPLOYMENTS_ENDPOINT = '/container-deployments'
@@ -727,8 +727,8 @@ def __init__(self, name: str, docker_config_json: str):
class ContainersService:
"""Service for managing container deployments.
- This class provides methods for interacting with the DataCrunch container
- deployment API, including CRUD operations for deployments and related resources.
+ This class provides methods for interacting with the container deployment
+ API, including CRUD operations for deployments and related resources.
"""
def __init__(self, http_client: HTTPClient, inference_key: str | None = None) -> None:
diff --git a/verda/datacrunch.py b/verda/datacrunch.py
new file mode 100644
index 0000000..0101c9a
--- /dev/null
+++ b/verda/datacrunch.py
@@ -0,0 +1,43 @@
+# Frozen, minimal compatibility layer for the old DataCrunch API
+
+from verda import DataCrunchClient
+from verda._version import __version__
+from verda.authentication.authentication import AuthenticationService
+from verda.balance.balance import BalanceService
+from verda.constants import Constants
+from verda.containers.containers import ContainersService
+from verda.http_client.http_client import HTTPClient
+from verda.images.images import ImagesService
+from verda.instance_types.instance_types import InstanceTypesService
+from verda.instances.instances import InstancesService
+from verda.locations.locations import LocationsService
+from verda.ssh_keys.ssh_keys import SSHKeysService
+from verda.startup_scripts.startup_scripts import StartupScriptsService
+from verda.volume_types.volume_types import VolumeTypesService
+from verda.volumes.volumes import VolumesService
+
+__all__ = [
+ 'AuthenticationService',
+ 'BalanceService',
+ 'Constants',
+ 'ContainersService',
+ 'DataCrunchClient',
+ 'HTTPClient',
+ 'ImagesService',
+ 'InstanceTypesService',
+ 'InstancesService',
+ 'LocationsService',
+ 'SSHKeysService',
+ 'StartupScriptsService',
+ 'VolumeTypesService',
+ 'VolumesService',
+ '__version__',
+]
+
+import warnings
+
+warnings.warn(
+ 'datacrunch.datacrunch is deprecated; import from the verda package instead.',
+ DeprecationWarning,
+ stacklevel=2,
+)
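+
+# Illustrative behavior (editorial sketch, not part of this diff): the warning
+# above runs at import time, and Python caches modules in sys.modules, so it
+# fires at most once per interpreter:
+#
+#   from verda.datacrunch import DataCrunchClient  # DeprecationWarning
+#   from verda.datacrunch import Constants         # cached module, no warning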
diff --git a/datacrunch/exceptions.py b/verda/exceptions.py
similarity index 80%
rename from datacrunch/exceptions.py
rename to verda/exceptions.py
index e93bd26..7b170df 100644
--- a/datacrunch/exceptions.py
+++ b/verda/exceptions.py
@@ -1,5 +1,5 @@
class APIException(Exception):
- """This exception is raised if there was an error from datacrunch's API.
+ """This exception is raised if there was an error from verda's API.
Could be an invalid input, token etc.
@@ -15,7 +15,7 @@ def __init__(self, code: str, message: str) -> None:
:type message: str
"""
self.code = code
- """Error code. should be available in DataCrunchClient.error_codes"""
+ """Error code. should be available in VerdaClient.error_codes"""
self.message = message
"""Error message
diff --git a/datacrunch/helpers.py b/verda/helpers.py
similarity index 100%
rename from datacrunch/helpers.py
rename to verda/helpers.py
diff --git a/datacrunch/http_client/__init__.py b/verda/http_client/__init__.py
similarity index 100%
rename from datacrunch/http_client/__init__.py
rename to verda/http_client/__init__.py
diff --git a/datacrunch/http_client/http_client.py b/verda/http_client/http_client.py
similarity index 97%
rename from datacrunch/http_client/http_client.py
rename to verda/http_client/http_client.py
index 322662e..31cd271 100644
--- a/datacrunch/http_client/http_client.py
+++ b/verda/http_client/http_client.py
@@ -2,8 +2,8 @@
import requests
-from datacrunch._version import __version__
-from datacrunch.exceptions import APIException
+from verda._version import __version__
+from verda.exceptions import APIException
def handle_error(response: requests.Response) -> None:
@@ -235,8 +235,8 @@ def _add_base_url(self, url: str) -> str:
Example:
if the relative url is '/balance'
- and the base url is 'https://api.datacrunch.io/v1'
- then this method will return 'https://api.datacrunch.io/v1/balance'
+ and the base url is 'https://api.verda.com/v1'
+ then this method will return 'https://api.verda.com/v1/balance'
:param url: a relative url path
:type url: str
diff --git a/datacrunch/images/__init__.py b/verda/images/__init__.py
similarity index 100%
rename from datacrunch/images/__init__.py
rename to verda/images/__init__.py
diff --git a/datacrunch/images/images.py b/verda/images/images.py
similarity index 96%
rename from datacrunch/images/images.py
rename to verda/images/images.py
index 8442e9a..1ed1e48 100644
--- a/datacrunch/images/images.py
+++ b/verda/images/images.py
@@ -1,4 +1,4 @@
-from datacrunch.helpers import stringify_class_object_properties
+from verda.helpers import stringify_class_object_properties
IMAGES_ENDPOINT = '/images'
diff --git a/datacrunch/instance_types/__init__.py b/verda/instance_types/__init__.py
similarity index 100%
rename from datacrunch/instance_types/__init__.py
rename to verda/instance_types/__init__.py
diff --git a/datacrunch/instance_types/instance_types.py b/verda/instance_types/instance_types.py
similarity index 100%
rename from datacrunch/instance_types/instance_types.py
rename to verda/instance_types/instance_types.py
diff --git a/datacrunch/instances/__init__.py b/verda/instances/__init__.py
similarity index 100%
rename from datacrunch/instances/__init__.py
rename to verda/instances/__init__.py
diff --git a/datacrunch/instances/instances.py b/verda/instances/instances.py
similarity index 98%
rename from datacrunch/instances/instances.py
rename to verda/instances/instances.py
index 9bbf8a5..625b14f 100644
--- a/datacrunch/instances/instances.py
+++ b/verda/instances/instances.py
@@ -5,7 +5,7 @@
from dataclasses_json import dataclass_json
-from datacrunch.constants import InstanceStatus, Locations
+from verda.constants import InstanceStatus, Locations
INSTANCES_ENDPOINT = '/instances'
@@ -70,8 +70,7 @@ class Instance:
class InstancesService:
"""Service for managing cloud instances through the API.
- This service provides methods to create, retrieve, and manage cloud instances
- through the DataCrunch API.
+ This service provides methods to create, retrieve, and manage cloud instances.
"""
def __init__(self, http_client) -> None:
diff --git a/datacrunch/locations/__init__.py b/verda/locations/__init__.py
similarity index 100%
rename from datacrunch/locations/__init__.py
rename to verda/locations/__init__.py
diff --git a/datacrunch/locations/locations.py b/verda/locations/locations.py
similarity index 100%
rename from datacrunch/locations/locations.py
rename to verda/locations/locations.py
diff --git a/datacrunch/ssh_keys/__init__.py b/verda/ssh_keys/__init__.py
similarity index 100%
rename from datacrunch/ssh_keys/__init__.py
rename to verda/ssh_keys/__init__.py
diff --git a/datacrunch/ssh_keys/ssh_keys.py b/verda/ssh_keys/ssh_keys.py
similarity index 100%
rename from datacrunch/ssh_keys/ssh_keys.py
rename to verda/ssh_keys/ssh_keys.py
diff --git a/datacrunch/startup_scripts/__init__.py b/verda/startup_scripts/__init__.py
similarity index 100%
rename from datacrunch/startup_scripts/__init__.py
rename to verda/startup_scripts/__init__.py
diff --git a/datacrunch/startup_scripts/startup_scripts.py b/verda/startup_scripts/startup_scripts.py
similarity index 100%
rename from datacrunch/startup_scripts/startup_scripts.py
rename to verda/startup_scripts/startup_scripts.py
diff --git a/datacrunch/datacrunch.py b/verda/verda.py
similarity index 69%
rename from datacrunch/datacrunch.py
rename to verda/verda.py
index d95872e..3177d2a 100644
--- a/datacrunch/datacrunch.py
+++ b/verda/verda.py
@@ -1,36 +1,36 @@
-from datacrunch._version import __version__
-from datacrunch.authentication.authentication import AuthenticationService
-from datacrunch.balance.balance import BalanceService
-from datacrunch.constants import Constants
-from datacrunch.containers.containers import ContainersService
-from datacrunch.http_client.http_client import HTTPClient
-from datacrunch.images.images import ImagesService
-from datacrunch.instance_types.instance_types import InstanceTypesService
-from datacrunch.instances.instances import InstancesService
-from datacrunch.locations.locations import LocationsService
-from datacrunch.ssh_keys.ssh_keys import SSHKeysService
-from datacrunch.startup_scripts.startup_scripts import StartupScriptsService
-from datacrunch.volume_types.volume_types import VolumeTypesService
-from datacrunch.volumes.volumes import VolumesService
-
-
-class DataCrunchClient:
- """Client for interacting with DataCrunch's public API."""
+from verda._version import __version__
+from verda.authentication.authentication import AuthenticationService
+from verda.balance.balance import BalanceService
+from verda.constants import Constants
+from verda.containers.containers import ContainersService
+from verda.http_client.http_client import HTTPClient
+from verda.images.images import ImagesService
+from verda.instance_types.instance_types import InstanceTypesService
+from verda.instances.instances import InstancesService
+from verda.locations.locations import LocationsService
+from verda.ssh_keys.ssh_keys import SSHKeysService
+from verda.startup_scripts.startup_scripts import StartupScriptsService
+from verda.volume_types.volume_types import VolumeTypesService
+from verda.volumes.volumes import VolumesService
+
+
+class VerdaClient:
+ """Client for interacting with Verda public API."""
def __init__(
self,
client_id: str,
client_secret: str,
- base_url: str = 'https://api.datacrunch.io/v1',
+ base_url: str = 'https://api.verda.com/v1',
inference_key: str | None = None,
) -> None:
- """The DataCrunch client.
+ """Verda client.
:param client_id: client id
:type client_id: str
:param client_secret: client secret
:type client_secret: str
- :param base_url: base url for all the endpoints, optional, defaults to "https://api.datacrunch.io/v1"
+ :param base_url: base url for all the endpoints, optional, defaults to "https://api.verda.com/v1"
:type base_url: str, optional
:param inference_key: inference key, optional
:type inference_key: str, optional
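+
+ Example (illustrative sketch, not part of this diff)::
+
+     client = VerdaClient('client-id', 'client-secret')
+     print(client.constants.base_url)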
@@ -78,3 +78,6 @@ def __init__(
self.containers: ContainersService = ContainersService(self._http_client, inference_key)
"""Containers service. Deploy, manage, and monitor container deployments"""
+
+
+__all__ = ['VerdaClient']
diff --git a/datacrunch/volume_types/__init__.py b/verda/volume_types/__init__.py
similarity index 100%
rename from datacrunch/volume_types/__init__.py
rename to verda/volume_types/__init__.py
diff --git a/datacrunch/volume_types/volume_types.py b/verda/volume_types/volume_types.py
similarity index 100%
rename from datacrunch/volume_types/volume_types.py
rename to verda/volume_types/volume_types.py
diff --git a/datacrunch/volumes/__init__.py b/verda/volumes/__init__.py
similarity index 100%
rename from datacrunch/volumes/__init__.py
rename to verda/volumes/__init__.py
diff --git a/datacrunch/volumes/volumes.py b/verda/volumes/volumes.py
similarity index 98%
rename from datacrunch/volumes/volumes.py
rename to verda/volumes/volumes.py
index 554ba78..ca66573 100644
--- a/datacrunch/volumes/volumes.py
+++ b/verda/volumes/volumes.py
@@ -1,5 +1,5 @@
-from datacrunch.constants import Locations, VolumeActions
-from datacrunch.helpers import stringify_class_object_properties
+from verda.constants import Locations, VolumeActions
+from verda.helpers import stringify_class_object_properties
VOLUMES_ENDPOINT = '/volumes'