|
81 | 81 | 'pytest-shard', |
82 | 82 | 'pytest-xdist', |
83 | 83 | # Lazy-deps required by core |
84 | | - # TODO(b/418761065): Update to 2.65.0 once the bug is fixed. |
85 | 84 | 'apache-beam<2.65.0', |
86 | 85 | 'conllu', |
87 | 86 | 'mlcroissant>=1.0.9', |
|
150 | 149 | # nltk==3.8.2 is broken: https://github.com/nltk/nltk/issues/3293 |
151 | 150 | 'nltk==3.8.1', |
152 | 151 | 'tldextract', |
| 152 | + # tensorflow==2.20.0 is not compatible with gcld3 because of a protobuf |
| 153 | + # version conflict. |
| 154 | + 'tensorflow<2.20.0', |
153 | 155 | ], |
154 | 156 | 'c4_wsrs': ['apache-beam<2.65.0'], |
155 | 157 | 'cats_vs_dogs': ['matplotlib'], |
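The new `tensorflow<2.20.0` pin in the c4 extra can be sanity-checked against a local environment. The snippet below is a minimal sketch, not part of setup.py; it assumes `packaging` is installed and that TensorFlow is already present.

# Minimal sketch: verify the installed TensorFlow against the pin added to
# the c4 extra above (TF 2.20.0 and gcld3 pull in conflicting protobuf
# versions, per the comment in the diff).
from importlib import metadata

from packaging.requirements import Requirement

req = Requirement('tensorflow<2.20.0')
installed = metadata.version('tensorflow')  # raises PackageNotFoundError if missing
if not req.specifier.contains(installed, prereleases=True):
  raise RuntimeError(f'tensorflow {installed} does not satisfy {req.specifier}')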
|
167 | 169 | 'scipy', |
168 | 170 | ], |
169 | 171 | 'librispeech': ['pydub'], # and ffmpeg installed |
170 | | - 'lsun': ['tensorflow-io[tensorflow]'], |
171 | | - # sklearn version required to avoid conflict with librosa from |
172 | | - # https://github.com/scikit-learn/scikit-learn/issues/14485 |
173 | | - # See https://github.com/librosa/librosa/issues/1160 |
174 | | - 'nsynth': ['crepe>=0.0.11', 'librosa', 'scikit-learn==0.20.3'], |
| 172 | + 'lsun': [ |
| 173 | + # tensorflow-io is compiled against specific versions of TF. |
| 174 | + 'tensorflow-io[tensorflow]', |
| 175 | + ], |
| 176 | + 'nsynth': [ |
| 177 | + 'crepe', |
| 178 | + 'librosa', |
| 179 | + # tensorflow==2.20.0 is not compatible with librosa because of a protobuf |
| 180 | + # version conflict. |
| 181 | + 'tensorflow<2.20.0', |
| 182 | + ], |
175 | 183 | 'ogbg_molpcba': ['pandas', 'networkx'], |
176 | 184 | 'pet_finder': ['pandas'], |
177 | 185 | 'qm9': ['pandas'], |
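The lsun and nsynth extras above carry pins that cannot coexist with the default environment (tensorflow-io wheels are built against specific TF releases, and the nsynth stack needs `tensorflow<2.20.0`), which is why both datasets appear in `ISOLATED_DATASETS` further down. The helper below is hypothetical, not code from this repository; it assumes a POSIX virtualenv layout and that the published `tensorflow-datasets` package exposes these extras.

# Hypothetical helper: install one conflicting dataset extra into its own
# virtualenv so its pins never leak into the main environment.
import subprocess
import venv


def install_isolated_extra(dataset: str, env_dir: str) -> None:
  venv.create(env_dir, with_pip=True)
  pip = f'{env_dir}/bin/pip'  # POSIX layout; Windows venvs use Scripts\pip.exe
  subprocess.run(
      [pip, 'install', f'tensorflow-datasets[{dataset}]'], check=True
  )


install_isolated_extra('nsynth', '/tmp/tfds_nsynth_env')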
|
196 | 204 |
|
197 | 205 | # These datasets have dependencies that conflict with the rest of TFDS, so |
198 | 206 | # they are run in isolated environments. |
199 | | -ISOLATED_DATASETS = ('nsynth', 'lsun') |
| 207 | +ISOLATED_DATASETS = ('c4', 'lsun', 'nsynth') |
200 | 208 |
|
201 | 209 | # Extra dataset deps are required for the tests |
202 | 210 | all_dataset_dependencies = list( |
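The diff truncates `all_dataset_dependencies` at `list(`, but together with the updated `ISOLATED_DATASETS` tuple it presumably aggregates per-dataset test dependencies while skipping the datasets that only run in isolated environments. The sketch below restates that logic with a hypothetical subset of the extras mapping; the real setup.py builds the list from the full dictionary.

# Standalone sketch of the aggregation, using an assumed subset of extras.
ISOLATED_DATASETS = ('c4', 'lsun', 'nsynth')

DATASET_EXTRAS = {  # hypothetical subset of the real mapping
    'c4': ['apache-beam<2.65.0', 'nltk==3.8.1', 'tensorflow<2.20.0'],
    'lsun': ['tensorflow-io[tensorflow]'],
    'nsynth': ['crepe', 'librosa', 'tensorflow<2.20.0'],
    'qm9': ['pandas'],
}

all_dataset_dependencies = sorted({
    dep
    for name, deps in DATASET_EXTRAS.items()
    if name not in ISOLATED_DATASETS
    for dep in deps
})
assert all_dataset_dependencies == ['pandas']  # only non-isolated extras remain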
|
238 | 246 | license='Apache 2.0', |
239 | 247 | packages=setuptools.find_packages(), |
240 | 248 | package_data={ |
241 | | - 'tensorflow_datasets': DATASET_FILES + [ |
242 | | - # Bundle `datasets/` folder in PyPI releases |
243 | | - 'datasets/*/*', |
244 | | - 'core/utils/colormap.csv', |
245 | | - 'scripts/documentation/templates/*', |
246 | | - 'url_checksums/*', |
247 | | - 'checksums.tsv', |
248 | | - 'community-datasets.toml', |
249 | | - 'dataset_collections/*/*.md', |
250 | | - 'dataset_collections/*/*.bib', |
251 | | - 'core/valid_tags.txt', |
252 | | - ], |
| 249 | + 'tensorflow_datasets': ( |
| 250 | + DATASET_FILES |
| 251 | + + [ |
| 252 | + # Bundle `datasets/` folder in PyPI releases |
| 253 | + 'datasets/*/*', |
| 254 | + 'core/utils/colormap.csv', |
| 255 | + 'scripts/documentation/templates/*', |
| 256 | + 'url_checksums/*', |
| 257 | + 'checksums.tsv', |
| 258 | + 'community-datasets.toml', |
| 259 | + 'dataset_collections/*/*.md', |
| 260 | + 'dataset_collections/*/*.bib', |
| 261 | + 'core/valid_tags.txt', |
| 262 | + ] |
| 263 | + ), |
253 | 264 | }, |
254 | 265 | exclude_package_data={ |
255 | 266 | 'tensorflow_datasets': [ |
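The reflowed `package_data` entry above changes only formatting; the same globs still bundle the dataset folders and metadata files into releases. A minimal check, assuming a normal install of the built package, that one of the listed files is actually shipped:

# Minimal sketch: confirm a file matched by the package_data globs above is
# present in the installed tensorflow_datasets package.
from importlib import resources

colormap = resources.files('tensorflow_datasets') / 'core' / 'utils' / 'colormap.csv'
assert colormap.is_file(), 'colormap.csv was not bundled'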
|