
Commit 96d755f

docs(datalab): improve the API documentation wording (#1546)
1 parent 528f8c4 commit 96d755f

4 files changed: +54 additions, -54 deletions


scaleway-async/scaleway_async/datalab/v1beta1/api.py

Lines changed: 11 additions & 11 deletions
@@ -72,15 +72,15 @@ async def create_datalab(
 Create a new Data Lab. In this call, one can personalize the node counts, add a notebook, choose the private network, define the persistent volume storage capacity.
 :param name: The name of the Data Lab.
 :param description: The description of the Data Lab.
-:param has_notebook: Whether a JupyterLab notebook shall be created with the Data Lab or not.
+:param has_notebook: Select this option to include a notebook as part of the Data Lab.
 :param spark_version: The version of Spark running inside the Data Lab, available options can be viewed at ListClusterVersions.
-:param private_network_id: The private network to which the Data Lab is connected. Important for accessing the Spark Master URL from a private cluster.
+:param private_network_id: The unique identifier of the private network the Data Lab will be attached to.
 :param region: Region to target. If none is passed will use default region from the config.
 :param project_id: The unique identifier of the project where the Data Lab will be created.
 :param tags: The tags of the Data Lab.
-:param main: The Spark main node configuration of the Data Lab, has one parameter `node_type` which specifies the compute node type of the main node. See ListNodeTypes for available options.
-:param worker: The Spark worker node configuration of the Data Lab, has two parameters `node_type` for selecting the type of the worker node, and `node_count` for specifying the amount of nodes.
-:param total_storage: The total storage selected by the user for Spark workers. This means the workers will not use more then this amount for their workload.
+:param main: The cluster main node specification. It holds the parameters `node_type` which specifies the node type of the main node. See ListNodeTypes for available options. See ListNodeTypes for available options.
+:param worker: The cluster worker node specification. It holds the parameters `node_type` which specifies the node type of the worker node and `node_count` for specifying the amount of nodes.
+:param total_storage: The maximum persistent volume storage that will be available during workload.
 :return: :class:`Datalab <Datalab>`

 Usage:
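To make the reworded parameters concrete, here is a minimal, hedged sketch of the documented call from the async package. Only the parameter names (`name`, `description`, `has_notebook`, `spark_version`, `private_network_id`, `main`, `worker`) and the `CreateDatalabRequestSparkMain` / `CreateDatalabRequestSparkWorker` types come from this diff; the `Client.from_config_file_and_env()` factory, the `DatalabV1Beta1API` class name, and all literal values are assumptions for illustration, not taken from the commit.

import asyncio

from scaleway_async import Client  # assumed client factory location
from scaleway_async.datalab.v1beta1 import (
    CreateDatalabRequestSparkMain,
    CreateDatalabRequestSparkWorker,
    DatalabV1Beta1API,  # class name assumed from the SDK's usual naming convention
)


async def main() -> None:
    client = Client.from_config_file_and_env()  # assumed; loads credentials from config/env
    api = DatalabV1Beta1API(client)

    datalab = await api.create_datalab(
        name="demo-datalab",
        description="Example Data Lab",
        has_notebook=True,  # include a notebook as part of the Data Lab
        spark_version="3.5",  # placeholder; see ListClusterVersions for real options
        private_network_id="11111111-1111-1111-1111-111111111111",  # placeholder UUID
        main=CreateDatalabRequestSparkMain(node_type="MAIN-NODE-TYPE"),  # placeholder node type
        worker=CreateDatalabRequestSparkWorker(node_type="WORKER-NODE-TYPE", node_count=2),
    )
    print(datalab.name, datalab.status)


asyncio.run(main())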
@@ -386,12 +386,12 @@ async def list_node_types(
 resource_type: Optional[ListNodeTypesRequestResourceType] = None,
 ) -> ListNodeTypesResponse:
 """
-List the available compute node types upon which a Data Lab can be created.
+List the available compute node types for creating a Data Lab.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
 :param order_by: The order by field. Available fields are `name_asc`, `name_desc`, `vcpus_asc`, `vcpus_desc`, `memory_gigabytes_asc`, `memory_gigabytes_desc`, `vram_bytes_asc`, `vram_bytes_desc`, `gpus_asc`, `gpus_desc`.
-:param targets: Filter on the wanted targets, whether it's for main node or worker.
+:param targets: Filter based on the target of the nodes. Allows to filter the nodes based on their purpose which can be main or worker node.
 :param resource_type: Filter based on node type ( `cpu`/`gpu`/`all` ).
 :return: :class:`ListNodeTypesResponse <ListNodeTypesResponse>`
@@ -431,12 +431,12 @@ async def list_node_types_all(
 resource_type: Optional[ListNodeTypesRequestResourceType] = None,
 ) -> list[NodeType]:
 """
-List the available compute node types upon which a Data Lab can be created.
+List the available compute node types for creating a Data Lab.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
 :param order_by: The order by field. Available fields are `name_asc`, `name_desc`, `vcpus_asc`, `vcpus_desc`, `memory_gigabytes_asc`, `memory_gigabytes_desc`, `vram_bytes_asc`, `vram_bytes_desc`, `gpus_asc`, `gpus_desc`.
-:param targets: Filter on the wanted targets, whether it's for main node or worker.
+:param targets: Filter based on the target of the nodes. Allows to filter the nodes based on their purpose which can be main or worker node.
 :param resource_type: Filter based on node type ( `cpu`/`gpu`/`all` ).
 :return: :class:`list[NodeType] <list[NodeType]>`
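The two hunks above document the same listing in paginated and flat form; a short hedged sketch of the flat variant, reusing the assumed `api` object from the create_datalab sketch. Only the method name, the `page_size` parameter, and the list[NodeType] return type come from the diff.

# Reusing the assumed `api` object from the create_datalab sketch above.
# list_node_types_all is documented to return a flat list[NodeType], so no
# manual pagination is needed; the order_by / targets / resource_type filters
# described in the docstring could be passed here as well.
node_types = await api.list_node_types_all(page_size=50)
for node_type in node_types:
    print(node_type)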
@@ -469,7 +469,7 @@ async def list_notebook_versions(
 order_by: Optional[ListNotebookVersionsRequestOrderBy] = None,
 ) -> ListNotebookVersionsResponse:
 """
-List available notebook versions.
+Lists available notebook versions.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
@@ -508,7 +508,7 @@ async def list_notebook_versions_all(
 order_by: Optional[ListNotebookVersionsRequestOrderBy] = None,
 ) -> list[Notebook]:
 """
-List available notebook versions.
+Lists available notebook versions.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
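A matching hedged sketch for the notebook-version listing documented above. Only the method names, the `page` / `page_size` parameters, and the list[Notebook] return type are taken from the diff; everything else is assumed.

# Reusing the assumed `api` object from the sketches above. The *_all helper
# is documented to return a flat list[Notebook]; the paginated variant takes
# the page / page_size parameters shown in the docstring.
notebooks = await api.list_notebook_versions_all()
for notebook in notebooks:
    print(notebook)

# Equivalent first page only, via the paginated variant:
first_page = await api.list_notebook_versions(page=1, page_size=20)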

scaleway-async/scaleway_async/datalab/v1beta1/types.py

Lines changed: 16 additions & 16 deletions
@@ -250,7 +250,7 @@ class Datalab:
 
 project_id: str
 """
-The identifier of the project where the Data Lab has been created.
+The unique identifier of the project where the Data Lab has been created.
 """
 
 name: str
@@ -270,7 +270,7 @@ class Datalab:
 
 status: DatalabStatus
 """
-The status of the Data Lab. For a working Data Lab this should be `ready`.
+The status of the Data Lab. For a working Data Lab the status is marked as `ready`.
 """
 
 region: ScwRegion
@@ -290,17 +290,17 @@ class Datalab:
 
 private_network_id: str
 """
-The private network to which the data lab is connected. This is important for accessing the Spark Master URL.
+The unique identifier of the private network to which the Data Lab is attached to.
 """
 
 main: Optional[DatalabSparkMain] = None
 """
-The Spark Main node specification of Data lab. It holds the parameters `node_type` the compute node type of the main node, `spark_ui_url` where the Spark UI is available, `spark_master_url` with which one can connect to the cluster from within one's VPC, `root_volume` the size of the volume assigned to the main node.
+The Spark Main node specification of Data lab. It holds the parameters `node_type`, `spark_ui_url` (available to reach Spark UI), `spark_master_url` (used to reach the cluster within a VPC), `root_volume` (size of the volume assigned to the cluster).
 """
 
 worker: Optional[DatalabSparkWorker] = None
 """
-The worker node specification of the Data Lab. It presents the parameters `node_type` the compute node type of each worker node, `node_count` the number of worker nodes currently in the cluster, `root_volume` the root volume size of each executor.
+The cluster worker nodes specification. It holds the parameters `node_type`, `node_count`, `root_volume` (size of the volume assigned to the cluster).
 """
 
 created_at: Optional[datetime] = None
@@ -315,17 +315,17 @@ class Datalab:
 
 notebook_url: Optional[str] = None
 """
-The URL of said notebook if exists.
+The URL of the notebook if available.
 """
 
 total_storage: Optional[Volume] = None
 """
-The total storage selected by the user for Spark.
+The total persistent volume storage selected to run Spark.
 """
 
 notebook_master_url: Optional[str] = None
 """
-The URL to the Spark Master endpoint from, and only from the perspective of the JupyterLab Notebook. This is NOT the URL to use for accessing the cluster from a private server.
+The URL that is used to reach the cluster from the notebook when available. This URL cannot be used to reach the cluster from a server.
 """
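Taken together, the Datalab fields above describe what a caller can inspect on a returned object; a small hedged sketch, reusing the `datalab` value from the earlier create_datalab sketch. The `DatalabStatus.READY` member name is an assumption based on the `ready` status mentioned in the docstring, and the field names come only from this diff.

from scaleway_async.datalab.v1beta1 import DatalabStatus  # enum type shown in the diff

# `datalab` is the object returned by the create_datalab sketch above.
if datalab.status == DatalabStatus.READY:  # member name assumed from the `ready` status in the docstring
    print("Notebook:", datalab.notebook_url)  # set when a notebook was requested
    print("Spark master (VPC):", datalab.main.spark_master_url if datalab.main else None)
    # notebook_master_url is only meant for the notebook itself, not for servers in the VPC.
    print("Notebook-side master URL:", datalab.notebook_master_url)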

@@ -436,7 +436,7 @@ class CreateDatalabRequest:
 
 has_notebook: bool
 """
-Whether a JupyterLab notebook shall be created with the Data Lab or not.
+Select this option to include a notebook as part of the Data Lab.
 """
 
 spark_version: str
@@ -446,7 +446,7 @@ class CreateDatalabRequest:
 
 private_network_id: str
 """
-The private network to which the Data Lab is connected. Important for accessing the Spark Master URL from a private cluster.
+The unique identifier of the private network the Data Lab will be attached to.
 """
 
 region: Optional[ScwRegion] = None
@@ -466,17 +466,17 @@ class CreateDatalabRequest:
 
 main: Optional[CreateDatalabRequestSparkMain] = None
 """
-The Spark main node configuration of the Data Lab, has one parameter `node_type` which specifies the compute node type of the main node. See ListNodeTypes for available options.
+The cluster main node specification. It holds the parameters `node_type` which specifies the node type of the main node. See ListNodeTypes for available options. See ListNodeTypes for available options.
 """
 
 worker: Optional[CreateDatalabRequestSparkWorker] = None
 """
-The Spark worker node configuration of the Data Lab, has two parameters `node_type` for selecting the type of the worker node, and `node_count` for specifying the amount of nodes.
+The cluster worker node specification. It holds the parameters `node_type` which specifies the node type of the worker node and `node_count` for specifying the amount of nodes.
 """
 
 total_storage: Optional[Volume] = None
 """
-The total storage selected by the user for Spark workers. This means the workers will not use more then this amount for their workload.
+The maximum persistent volume storage that will be available during workload.
 """
 
@@ -563,7 +563,7 @@ class ListClusterVersionsResponse:
 @dataclass
 class ListDatalabsRequest:
 """
-A request to list Datalabs.
+A request to list Data Labs.
 """
 
 region: Optional[ScwRegion] = None
@@ -620,7 +620,7 @@ class ListDatalabsResponse:
 
 total_count: int
 """
-The total count of Datalabs.
+The total count of Data Labs.
 """
 
@@ -654,7 +654,7 @@ class ListNodeTypesRequest:
 
 targets: Optional[list[NodeTypeTarget]] = field(default_factory=list)
 """
-Filter on the wanted targets, whether it's for main node or worker.
+Filter based on the target of the nodes. Allows to filter the nodes based on their purpose which can be main or worker node.
 """
 
 resource_type: Optional[ListNodeTypesRequestResourceType] = (

scaleway/scaleway/datalab/v1beta1/api.py

Lines changed: 11 additions & 11 deletions
@@ -72,15 +72,15 @@ def create_datalab(
 Create a new Data Lab. In this call, one can personalize the node counts, add a notebook, choose the private network, define the persistent volume storage capacity.
 :param name: The name of the Data Lab.
 :param description: The description of the Data Lab.
-:param has_notebook: Whether a JupyterLab notebook shall be created with the Data Lab or not.
+:param has_notebook: Select this option to include a notebook as part of the Data Lab.
 :param spark_version: The version of Spark running inside the Data Lab, available options can be viewed at ListClusterVersions.
-:param private_network_id: The private network to which the Data Lab is connected. Important for accessing the Spark Master URL from a private cluster.
+:param private_network_id: The unique identifier of the private network the Data Lab will be attached to.
 :param region: Region to target. If none is passed will use default region from the config.
 :param project_id: The unique identifier of the project where the Data Lab will be created.
 :param tags: The tags of the Data Lab.
-:param main: The Spark main node configuration of the Data Lab, has one parameter `node_type` which specifies the compute node type of the main node. See ListNodeTypes for available options.
-:param worker: The Spark worker node configuration of the Data Lab, has two parameters `node_type` for selecting the type of the worker node, and `node_count` for specifying the amount of nodes.
-:param total_storage: The total storage selected by the user for Spark workers. This means the workers will not use more then this amount for their workload.
+:param main: The cluster main node specification. It holds the parameters `node_type` which specifies the node type of the main node. See ListNodeTypes for available options. See ListNodeTypes for available options.
+:param worker: The cluster worker node specification. It holds the parameters `node_type` which specifies the node type of the worker node and `node_count` for specifying the amount of nodes.
+:param total_storage: The maximum persistent volume storage that will be available during workload.
 :return: :class:`Datalab <Datalab>`

 Usage:
@@ -386,12 +386,12 @@ def list_node_types(
 resource_type: Optional[ListNodeTypesRequestResourceType] = None,
 ) -> ListNodeTypesResponse:
 """
-List the available compute node types upon which a Data Lab can be created.
+List the available compute node types for creating a Data Lab.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
 :param order_by: The order by field. Available fields are `name_asc`, `name_desc`, `vcpus_asc`, `vcpus_desc`, `memory_gigabytes_asc`, `memory_gigabytes_desc`, `vram_bytes_asc`, `vram_bytes_desc`, `gpus_asc`, `gpus_desc`.
-:param targets: Filter on the wanted targets, whether it's for main node or worker.
+:param targets: Filter based on the target of the nodes. Allows to filter the nodes based on their purpose which can be main or worker node.
 :param resource_type: Filter based on node type ( `cpu`/`gpu`/`all` ).
 :return: :class:`ListNodeTypesResponse <ListNodeTypesResponse>`
@@ -431,12 +431,12 @@ def list_node_types_all(
 resource_type: Optional[ListNodeTypesRequestResourceType] = None,
 ) -> list[NodeType]:
 """
-List the available compute node types upon which a Data Lab can be created.
+List the available compute node types for creating a Data Lab.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
 :param order_by: The order by field. Available fields are `name_asc`, `name_desc`, `vcpus_asc`, `vcpus_desc`, `memory_gigabytes_asc`, `memory_gigabytes_desc`, `vram_bytes_asc`, `vram_bytes_desc`, `gpus_asc`, `gpus_desc`.
-:param targets: Filter on the wanted targets, whether it's for main node or worker.
+:param targets: Filter based on the target of the nodes. Allows to filter the nodes based on their purpose which can be main or worker node.
 :param resource_type: Filter based on node type ( `cpu`/`gpu`/`all` ).
 :return: :class:`list[NodeType] <list[NodeType]>`
@@ -469,7 +469,7 @@ def list_notebook_versions(
 order_by: Optional[ListNotebookVersionsRequestOrderBy] = None,
 ) -> ListNotebookVersionsResponse:
 """
-List available notebook versions.
+Lists available notebook versions.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
@@ -508,7 +508,7 @@ def list_notebook_versions_all(
 order_by: Optional[ListNotebookVersionsRequestOrderBy] = None,
 ) -> list[Notebook]:
 """
-List available notebook versions.
+Lists available notebook versions.
 :param region: Region to target. If none is passed will use default region from the config.
 :param page: The page number.
 :param page_size: The page size.
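The synchronous package touched by this file mirrors the async one above. As a brief hedged sketch of the same documented helpers without `await` — the `Client.from_config_file_and_env()` factory and the `DatalabV1Beta1API` class name are again assumptions, only the method names and parameters come from the diff:

from scaleway import Client  # sync package, per the scaleway/scaleway/... path above
from scaleway.datalab.v1beta1 import DatalabV1Beta1API  # class name assumed

client = Client.from_config_file_and_env()  # assumed factory
api = DatalabV1Beta1API(client)

# Same documented helpers as the async sketches, called synchronously.
node_types = api.list_node_types_all(page_size=50)
notebooks = api.list_notebook_versions_all()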
