python3Packages.tensorflow-datasets: init at 4.4.0 (#154117)

* python3Packages.tensorflow-datasets: init at 4.4.0

* Update pkgs/development/python-modules/tensorflow-datasets/default.nix

Co-authored-by: Dmitry Kalinkin <dmitry.kalinkin@gmail.com>

Co-authored-by: Samuel Ainsworth <skainsworth@gmail.com>
Co-authored-by: Dmitry Kalinkin <dmitry.kalinkin@gmail.com>
Alexander Tsvyashchenko 2022-01-11 08:53:55 +01:00 committed by GitHub
parent 07e6e9d92f
commit b2737d4980
3 changed files with 165 additions and 0 deletions

pkgs/development/python-modules/tensorflow-datasets/corruptions.patch
@@ -0,0 +1,22 @@
diff --git a/tensorflow_datasets/image_classification/corruptions.py b/tensorflow_datasets/image_classification/corruptions.py
index 066c4460..cb9a6667 100644
--- a/tensorflow_datasets/image_classification/corruptions.py
+++ b/tensorflow_datasets/image_classification/corruptions.py
@@ -35,7 +35,7 @@ FROST_FILENAMES = []
def _imagemagick_bin():
- return 'imagemagick' # pylint: disable=unreachable
+ return 'convert' # pylint: disable=unreachable
# /////////////// Corruption Helpers ///////////////
@@ -675,7 +675,7 @@ def spatter(x, severity=1):
# ker = np.array([[-1,-2,-3],[-2,0,0],[-3,0,1]], dtype=np.float32)
# ker -= np.mean(ker)
ker = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]])
- dist = cv2.filter2D(dist, cv2.CVX_8U, ker)
+ dist = cv2.filter2D(dist, cv2.CV_8U, ker)
dist = cv2.blur(dist, (3, 3)).astype(np.float32)
m = cv2.cvtColor(liquid_layer * dist, cv2.COLOR_GRAY2BGRA)
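
A note on the two one-line fixes above (illustrative context, not part of the commit itself): ImageMagick installs its converter as `convert`, there is no executable named `imagemagick`, and the OpenCV Python bindings define `cv2.CV_8U` but no `cv2.CVX_8U`. A minimal sketch exercising the corrected calls, assuming numpy, OpenCV's Python bindings and ImageMagick are available:

    # Illustrative sketch only; mirrors the patched calls, not tfds code.
    import shutil
    import cv2
    import numpy as np

    # ImageMagick's CLI is named `convert`; this resolves to a path when it is on PATH.
    print(shutil.which('convert'))

    # `cv2.CV_8U` is a real depth constant; `cv2.CVX_8U` would raise AttributeError.
    dist = np.random.randint(0, 256, size=(32, 32), dtype=np.uint8)
    ker = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]], dtype=np.float32)
    out = cv2.filter2D(dist, cv2.CV_8U, ker)
    print(out.dtype)  # uint8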

pkgs/development/python-modules/tensorflow-datasets/default.nix
@@ -0,0 +1,141 @@
{ apache-beam
, attrs
, beautifulsoup4
, buildPythonPackage
, dill
, dm-tree
, fetchFromGitHub
, ffmpeg
, future
, imagemagick
, importlib-resources
, jinja2
, langdetect
, lib
, matplotlib
, mwparserfromhell
, networkx
, nltk
, numpy
, opencv4
, pandas
, pillow
, promise
, protobuf
, pycocotools
, pydub
, pytestCheckHook
, requests
, scikitimage
, scipy
, six
, tensorflow
, tensorflow-metadata
, termcolor
, tifffile
, tqdm
}:

buildPythonPackage rec {
  pname = "tensorflow-datasets";
  version = "4.4.0";

  src = fetchFromGitHub {
    owner = "tensorflow";
    repo = "datasets";
    rev = "v${version}";
    sha256 = "11kbpv54nwr0xf7z5mkj2lmrfqfmcdq8qcpapnqck1kiawr3yad6";
  };

  patches = [
    # addresses https://github.com/tensorflow/datasets/issues/3673
    ./corruptions.patch
  ];

  propagatedBuildInputs = [
    attrs
    dill
    dm-tree
    future
    importlib-resources
    numpy
    promise
    protobuf
    requests
    six
    tensorflow-metadata
    termcolor
    tqdm
  ];

  pythonImportsCheck = [
    "tensorflow_datasets"
  ];

  checkInputs = [
    apache-beam
    beautifulsoup4
    ffmpeg
    imagemagick
    jinja2
    langdetect
    matplotlib
    mwparserfromhell
    networkx
    nltk
    opencv4
    pandas
    pillow
    pycocotools
    pydub
    pytestCheckHook
    scikitimage
    scipy
    tensorflow
    tifffile
  ];

  disabledTestPaths = [
    # Sandbox violations: network access, filesystem write attempts outside of build dir, ...
    "tensorflow_datasets/core/dataset_builder_test.py"
    "tensorflow_datasets/core/dataset_info_test.py"
    "tensorflow_datasets/core/features/features_test.py"
    "tensorflow_datasets/core/github_api/github_path_test.py"
    "tensorflow_datasets/core/utils/gcs_utils_test.py"
    "tensorflow_datasets/scripts/cli/build_test.py"

    # Requires `pretty_midi` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/audio/groove_test.py"

    # Requires `crepe` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/audio/nsynth_test.py"

    # Requires `gcld3` and `pretty_midi` which are not packaged in `nixpkgs`.
    "tensorflow_datasets/core/lazy_imports_lib_test.py"

    # Requires `tensorflow_io` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/image/lsun_test.py"

    # Fails with `TypeError: Constant constructor takes either 0 or 2 positional arguments`
    # deep in TF AutoGraph. Doesn't reproduce in Docker with Ubuntu 22.04 => might be related
    # to the differences in some of the dependencies?
    "tensorflow_datasets/rl_unplugged/rlu_atari/rlu_atari_test.py"

    # Requires `tensorflow_docs` which is not packaged in `nixpkgs` and the test is for documentation anyway.
    "tensorflow_datasets/scripts/documentation/build_api_docs_test.py"

    # Not a test, should not be executed.
    "tensorflow_datasets/testing/test_utils.py"

    # Require `gcld3` and `nltk.punkt` which are not packaged in `nixpkgs`.
    "tensorflow_datasets/text/c4_test.py"
    "tensorflow_datasets/text/c4_utils_test.py"
  ];

  meta = with lib; {
    description = "Library of datasets ready to use with TensorFlow";
    homepage = "https://www.tensorflow.org/datasets/overview";
    license = licenses.asl20;
    maintainers = with maintainers; [ ndl ];
  };
}
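
The `pythonImportsCheck` above only verifies that `import tensorflow_datasets` succeeds inside the build sandbox. A slightly stronger, still offline smoke test could look like the following sketch (illustrative only, not part of the commit; `tfds.list_builders()` merely enumerates registered dataset builder names and needs no network access):

    # Illustrative sketch only: offline sanity check of the packaged library.
    import tensorflow_datasets as tfds

    builders = tfds.list_builders()  # names only, nothing is downloaded
    print(len(builders), "builders registered, e.g.", builders[:5])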

pkgs/top-level/python-packages.nix
@@ -9559,6 +9559,8 @@ in {
    lmdb-core = pkgs.lmdb;
  };

  tensorflow-datasets = callPackage ../development/python-modules/tensorflow-datasets { };

  tensorflow-estimator = callPackage ../development/python-modules/tensorflow-estimator { };

  tensorflow-metadata = callPackage ../development/python-modules/tensorflow-metadata { };