
Commit 8d3ec2d

Inline defaults for embedding algos
1 parent 7e3eb54 commit 8d3ec2d

17 files changed: 718 additions, 713 deletions
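
The pattern is the same across the four prediction methods in this file: parameters previously typed int | None or bool | None with a None default now carry their effective value inline, so the Python signature and the docstring state the default directly instead of leaving it as None to be resolved elsewhere. A minimal before/after sketch of the pattern, using hypothetical function names that are not part of this commit:

# Before: the effective default is hidden behind None and filled in later.
def predict_old(*, batch_size: int | None = None, sudo: bool | None = None) -> None:
    batch_size = 100 if batch_size is None else batch_size
    sudo = False if sudo is None else sudo

# After: the default is inlined in the signature itself.
def predict_new(*, batch_size: int = 100, sudo: bool = False) -> None:
    pass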

graphdatascience/procedure_surface/api/model/graphsage_model.py

Lines changed: 20 additions & 20 deletions
@@ -31,12 +31,12 @@ def predict_write(
         *,
         relationship_types: list[str] = ALL_TYPES,
         node_labels: list[str] = ALL_LABELS,
-        batch_size: int | None = None,
+        batch_size: int = 100,
         concurrency: int | None = None,
         write_concurrency: int | None = None,
         log_progress: bool = True,
         username: str | None = None,
-        sudo: bool | None = None,
+        sudo: bool = False,
         job_id: str | None = None,
     ) -> GraphSageWriteResult:
         """
@@ -52,17 +52,17 @@ def predict_write(
             The relationship types to consider.
         node_labels : list[str]
             The node labels to consider.
-        batch_size : int | None, default=None
+        batch_size : int = 100
             The batch size for prediction.
         concurrency : int | None, default=None
             The concurrency for computation.
         write_concurrency : int | None, default=None
             The concurrency for writing.
-        log_progress : bool | None, default=None
+        log_progress : bool = True
             Whether to log progress.
         username : str | None, default=None
             The username for the operation.
-        sudo : bool
+        sudo : bool = False
             Override memory estimation limits. Use with caution as this can lead to
             memory issues if the estimation is significantly wrong.
         job_id : str | None, default=None
@@ -95,11 +95,11 @@ def predict_stream(
         *,
         relationship_types: list[str] = ALL_TYPES,
         node_labels: list[str] = ALL_LABELS,
-        batch_size: int | None = None,
+        batch_size: int = 100,
         concurrency: int | None = None,
         log_progress: bool = True,
         username: str | None = None,
-        sudo: bool | None = None,
+        sudo: bool = False,
         job_id: str | None = None,
     ) -> DataFrame:
         """
@@ -113,15 +113,15 @@ def predict_stream(
             The relationship types to consider.
         node_labels : list[str]
             The node labels to consider.
-        batch_size : int | None, default=None
+        batch_size : int = 100
             The batch size for prediction.
         concurrency : int | None, default=None
             The concurrency for computation.
-        log_progress : bool | None, default=None
+        log_progress : bool = True
             Whether to log progress.
         username : str | None, default=None
             The username for the operation.
-        sudo : bool
+        sudo : bool = False
             Override memory estimation limits. Use with caution as this can lead to
             memory issues if the estimation is significantly wrong.
         job_id : str | None, default=None
@@ -153,11 +153,11 @@ def predict_mutate(
         *,
         relationship_types: list[str] = ALL_TYPES,
         node_labels: list[str] = ALL_LABELS,
-        batch_size: int | None = None,
+        batch_size: int = 100,
         concurrency: int | None = None,
         log_progress: bool = True,
         username: str | None = None,
-        sudo: bool | None = None,
+        sudo: bool = False,
         job_id: str | None = None,
     ) -> GraphSageMutateResult:
         """
@@ -173,15 +173,15 @@ def predict_mutate(
             The relationship types to consider.
         node_labels : list[str]
             The node labels to consider.
-        batch_size : int | None, default=None
+        batch_size : int = 100
             The batch size for prediction.
         concurrency : int | None, default=None
             The concurrency for computation.
-        log_progress : bool | None, default=None
+        log_progress : bool = True
             Whether to log progress.
         username : str | None, default=None
             The username for the operation.
-        sudo : bool
+        sudo : bool = False
             Override memory estimation limits. Use with caution as this can lead to
             memory issues if the estimation is significantly wrong.
         job_id : str | None, default=None
@@ -213,11 +213,11 @@ def predict_estimate(
         *,
         relationship_types: list[str] = ALL_TYPES,
         node_labels: list[str] = ALL_LABELS,
-        batch_size: int | None = None,
+        batch_size: int = 100,
         concurrency: int | None = None,
         log_progress: bool = True,
         username: str | None = None,
-        sudo: bool | None = None,
+        sudo: bool = False,
         job_id: str | None = None,
     ) -> EstimationResult:
         """
@@ -231,15 +231,15 @@ def predict_estimate(
             The relationship types to consider.
         node_labels : list[str]
             The node labels to consider.
-        batch_size : int | None, default=None
+        batch_size : int = 100
             The batch size for prediction.
         concurrency : int | None, default=None
             The concurrency for computation.
-        log_progress : bool | None, default=None
+        log_progress : bool = True
             Whether to log progress.
         username : str | None, default=None
             The username for the operation.
-        sudo : bool
+        sudo : bool = False
             Override memory estimation limits. Use with caution as this can lead to
             memory issues if the estimation is significantly wrong.
         job_id : str | None, default=None
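
For callers, only the point where the defaults are resolved changes: omitting batch_size now yields 100 and omitting sudo yields False straight from the Python signature, while explicitly passed values behave as before. A hedged usage sketch, assuming a trained GraphSage model handle named model and a graph object G as the first positional argument (neither is shown in this commit):

# Rely on the inlined defaults: batch_size=100, sudo=False, log_progress=True.
embeddings = model.predict_stream(G)

# Explicit keyword arguments still override the inlined defaults.
embeddings = model.predict_stream(G, batch_size=256, concurrency=4)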
