
Commit 8783b3f

Author: Martin Durant
Update some docs and API
Also, get version string to be right
Parent: 6445fa9

File tree

4 files changed: +28, -21 lines


docs/source/api.rst

Lines changed: 3 additions & 1 deletion
@@ -40,8 +40,10 @@ Sources
 .. autosummary::
     filenames
     from_kafka
+    from_kafka_batched
     from_textfile
-    from_socket
+    from_tcp
+    from_http_server

 DaskStream
 ----------
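For orientation, one of the newly listed sources can be used roughly like this. This is a minimal sketch following the from_tcp docstring example further down in this commit; the port and the print sink are illustrative, and treating Source as importable from streamz.sources is an assumption about the package layout.

    from streamz.sources import Source  # assumption: Source is the base class defined in streamz/sources.py

    # Port and downstream sink are illustrative only.
    source = Source.from_tcp(4567)  # emit data received on a local TCP port
    source.sink(print)              # print each received chunk
    source.start()                  # begin listening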

docs/source/conf.py

Lines changed: 3 additions & 2 deletions
@@ -56,9 +56,10 @@
 # built documents.
 #
 # The short X.Y version.
-version = '0.0.1'
+import streamz
+version = streamz.__version__
 # The full version, including alpha/beta/rc tags.
-release = '0.0.1'
+release = streamz.__version__

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
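This pattern requires streamz to be importable in the environment where Sphinx runs. A common defensive variant, not part of this commit, would fall back to a placeholder version when the import fails:

    # conf.py sketch (not part of this commit): tolerate a docs build
    # environment where streamz is not installed.
    try:
        import streamz
        version = release = streamz.__version__
    except ImportError:
        version = release = 'unknown'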

docs/source/index.rst

Lines changed: 0 additions & 1 deletion
@@ -88,5 +88,4 @@ data streaming systems like `Apache Flink <https://flink.apache.org/>`_,
    collections.rst
    api.rst
    collections-api.rst
-   dataframe-aggregations.rst
    async.rst

streamz/sources.py

Lines changed: 22 additions & 17 deletions
@@ -43,22 +43,22 @@ class from_textfile(Source):
     Parameters
     ----------
     f: file or string
+        Source of the data. If string, will be opened.
     poll_interval: Number
         Interval to poll file for new data in seconds
-    delimiter: str ("\n")
+    delimiter: str
         Character(s) to use to split the data into parts
-    start: bool (False)
+    start: bool
         Whether to start running immediately; otherwise call stream.start()
         explicitly.
-    from_end: bool (False)
+    from_end: bool
         Whether to begin streaming from the end of the file (i.e., only emit
         lines appended after the stream starts).

-    Example
-    -------
+    Examples
+    --------
     >>> source = Stream.from_textfile('myfile.json')  # doctest: +SKIP
     >>> js.map(json.loads).pluck('value').sum().sink(print)  # doctest: +SKIP
-
     >>> source.start()  # doctest: +SKIP

     Returns
@@ -177,8 +177,8 @@ class from_tcp(Source):
     server_kwargs : dict or None
         If given, additional arguments to pass to TCPServer

-    Example
-    -------
+    Examples
+    --------

     >>> source = Source.from_tcp(4567)  # doctest: +SKIP
     """
@@ -244,9 +244,11 @@ class from_http_server(Source):
     server_kwargs : dict or None
         If given, set of further parameters to pass on to HTTPServer

-    Example
-    -------
+    Examples
+    --------
+
     >>> source = Source.from_http_server(4567)  # doctest: +SKIP
+
     """

     def __init__(self, port, path='/.*', start=False, server_kwargs=None):
@@ -308,20 +310,22 @@ class from_kafka(Source):
         https://docs.confluent.io/current/clients/confluent-kafka-python/#configuration
         https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
         Examples:
-        bootstrap.servers: Connection string(s) (host:port) by which to reach Kafka
-        group.id: Identity of the consumer. If multiple sources share the same
-        group, each message will be passed to only one of them.
+        bootstrap.servers, Connection string(s) (host:port) by which to reach
+        Kafka;
+        group.id, Identity of the consumer. If multiple sources share the same
+        group, each message will be passed to only one of them.
     poll_interval: number
         Seconds that elapse between polling Kafka for new messages
     start: bool (False)
         Whether to start polling upon instantiation

-    Example
-    -------
+    Examples
+    --------

     >>> source = Stream.from_kafka(['mytopic'],
     ...     {'bootstrap.servers': 'localhost:9092',
     ...      'group.id': 'streamz'})  # doctest: +SKIP
+
     """
     def __init__(self, topics, consumer_params, poll_interval=0.1, start=False, **kwargs):
         self.cpars = consumer_params
@@ -458,12 +462,13 @@ def from_kafka_batched(topic, consumer_params, poll_interval='1s',
    start: bool (False)
        Whether to start polling upon instantiation

-    Example
-    -------
+    Examples
+    --------

    >>> source = Stream.from_kafka_batched('mytopic',
    ...     {'bootstrap.servers': 'localhost:9092',
    ...      'group.id': 'streamz'}, npartitions=4)  # doctest: +SKIP
+
    """
    if dask:
        from distributed.client import default_client
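For reference, the from_kafka_batched docstring example expands into a runnable pipeline roughly like this. This is a sketch only; the broker address, topic, and downstream steps are illustrative and not part of this commit.

    from streamz import Stream

    # Broker, topic, and the downstream pipeline are illustrative only.
    source = Stream.from_kafka_batched(
        'mytopic',
        {'bootstrap.servers': 'localhost:9092',
         'group.id': 'streamz'},
        npartitions=4)

    source.map(len).sink(print)  # e.g. print the size of each polled batch
    source.start()               # begin polling Kafka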
