Changes from all commits
75 changes: 49 additions & 26 deletions WORKSPACE
@@ -4,34 +4,32 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
name = "icu",
strip_prefix = "icu-release-64-2",
sha256 = "dfc62618aa4bd3ca14a3df548cd65fe393155edd213e49c39f3a30ccd618fc27",
build_file = "//third_party/icu:BUILD.bzl",
sha256 = "e424ba5282d95ad38b52639a08fb82164f0b0cbd7f17b53ae16bf14f8541855f",
strip_prefix = "icu-release-77-1",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/unicode-org/icu/archive/release-64-2.zip",
"https://github.com/unicode-org/icu/archive/release-64-2.zip",
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/unicode-org/icu/archive/release-77-1.zip",
"https://github.com/unicode-org/icu/archive/release-77-1.zip",
],
build_file = "//third_party/icu:BUILD.bzl",
patches = ["//third_party/icu:udata.patch"],
patch_args = ["-p1"],
)

http_archive(
name = "com_google_sentencepiece",
strip_prefix = "sentencepiece-0.1.96",
build_file = "//third_party/sentencepiece:BUILD",
patch_args = ["-p1"],
patches = ["//third_party/sentencepiece:sp.patch"],
sha256 = "8409b0126ebd62b256c685d5757150cf7fcb2b92a2f2b98efb3f38fc36719754",
strip_prefix = "sentencepiece-0.1.96",
urls = [
"https://github.com/google/sentencepiece/archive/refs/tags/v0.1.96.zip"
"https://github.com/google/sentencepiece/archive/refs/tags/v0.1.96.zip",
],
build_file = "//third_party/sentencepiece:BUILD",
patches = ["//third_party/sentencepiece:sp.patch"],
patch_args = ["-p1"],
)

http_archive(
name = "cppitertools",
urls = ["https://github.com/ryanhaining/cppitertools/archive/refs/tags/v2.0.zip"],
sha256 = "e56741b108d6baced98c4ccd83fd0d5a545937f2845978799c28d0312c0dee3d",
strip_prefix = "cppitertools-2.0",
urls = ["https://github.com/ryanhaining/cppitertools/archive/refs/tags/v2.0.zip"],
)

http_archive(
@@ -56,10 +54,10 @@ http_archive(

http_archive(
name = "org_tensorflow",
strip_prefix = "tensorflow-40998f44c0c500ce0f6e3b1658dfbc54f838a82a",
sha256 = "5a5bc4599964c71277dcac0d687435291e5810d2ac2f6283cc96736febf73aaf",
strip_prefix = "tensorflow-40998f44c0c500ce0f6e3b1658dfbc54f838a82a",
urls = [
"https://github.com/tensorflow/tensorflow/archive/40998f44c0c500ce0f6e3b1658dfbc54f838a82a.zip"
"https://github.com/tensorflow/tensorflow/archive/40998f44c0c500ce0f6e3b1658dfbc54f838a82a.zip",
],
)

@@ -72,15 +70,26 @@ http_archive(
],
)

http_archive(
name = "pybind11_bazel",
sha256 = "e10d65e64d101d2d3d0a52fdf04b1a5116e0cff16c9e0d0b1b1c2ed0e1b69e5d",
strip_prefix = "pybind11_bazel-b1d64d363b8cb6e8b7d9b9b1263b651410d7e4c5",
urls = ["https://github.com/pybind/pybind11_bazel/archive/b1d64d363b8cb6e8b7d9b9b1263b651410d7e4c5.tar.gz"],
)

load("@pybind11_bazel//:python_configure.bzl", "pybind_python_configure")

pybind_python_configure(name = "local_config_python")

http_archive(
name = "pybind11",
build_file = "@pybind11_bazel//:BUILD",
sha256 = "efc901aa0aab439a3fea6efeaf930b5a349fb06394bf845c64ce15a9cf8f0240",
strip_prefix = "pybind11-2.13.4",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/pybind/pybind11/archive/v2.13.4.tar.gz",
"https://github.com/pybind/pybind11/archive/v2.13.4.tar.gz",
],
sha256 = "efc901aa0aab439a3fea6efeaf930b5a349fb06394bf845c64ce15a9cf8f0240",
strip_prefix = "pybind11-2.13.4",
build_file = "//third_party/pybind11:BUILD.bzl",
)

http_archive(
@@ -99,27 +108,31 @@ load("//tensorflow_text:tftext.bzl", "py_deps_profile")

py_deps_profile(
name = "release_or_nightly",
requirements_in = "//oss_scripts/pip_package:requirements.in",
pip_repo_name = "pypi",
deps_map = {
"tensorflow": ["tf-nightly", "tf_header_lib", "libtensorflow_framework"],
"tf-keras": ["tf-keras-nightly"]
"tensorflow": [
"tf-nightly",
"tf_header_lib",
"libtensorflow_framework",
],
"tf-keras": ["tf-keras-nightly"],
},
pip_repo_name = "pypi",
requirements_in = "//oss_scripts/pip_package:requirements.in",
switch = {
"IS_NIGHTLY": "nightly"
}
"IS_NIGHTLY": "nightly",
},
)

load("@org_tensorflow//third_party/py:python_init_repositories.bzl", "python_init_repositories")

python_init_repositories(
default_python_version = "system",
requirements = {
"3.9": "//oss_scripts/pip_package:requirements_lock_3_9.txt",
"3.10": "//oss_scripts/pip_package:requirements_lock_3_10.txt",
"3.11": "//oss_scripts/pip_package:requirements_lock_3_11.txt",
"3.12": "//oss_scripts/pip_package:requirements_lock_3_12.txt",
},
default_python_version = "system",
)

load("@org_tensorflow//third_party/py:python_init_toolchains.bzl", "python_init_toolchains")
@@ -136,18 +149,28 @@ install_deps()

# Initialize TensorFlow dependencies.
load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3")

tf_workspace3()

load("@org_tensorflow//tensorflow:workspace2.bzl", "tf_workspace2")

tf_workspace2()

load("@org_tensorflow//tensorflow:workspace1.bzl", "tf_workspace1")

tf_workspace1()

load("@org_tensorflow//tensorflow:workspace0.bzl", "tf_workspace0")

tf_workspace0()

# Set up Android.
load("@org_tensorflow//third_party/android:android_configure.bzl", "android_configure")
android_configure(name="local_config_android")

android_configure(name = "local_config_android")

load("@local_config_android//:android.bzl", "android_workspace")

android_workspace()

load(
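The `py_deps_profile` call above switches the workspace between release and nightly PyPI packages. Its implementation lives in `tensorflow_text/tftext.bzl` and is not shown in this diff; the following is only a hypothetical sketch of how such a repository rule could consume `deps_map`, `pip_repo_name`, and `switch`, assuming it emits aliases such as `tensorflow_pkg` (the target that the pybind BUILD rules later reference as `@release_or_nightly//:tensorflow_pkg`).

```starlark
# Hypothetical sketch only -- the real py_deps_profile is defined in
# tensorflow_text/tftext.bzl and is not part of this diff.
def _py_deps_profile_impl(rctx):
    # Use the nightly variants when the IS_NIGHTLY env var matches the switch value.
    is_nightly = rctx.os.environ.get("IS_NIGHTLY", "") == rctx.attr.switch["IS_NIGHTLY"]
    build_lines = ['package(default_visibility = ["//visibility:public"])']
    for dep, variants in rctx.attr.deps_map.items():
        # Assumption: the first entry in the list is the nightly replacement.
        target = variants[0] if is_nightly else dep
        build_lines.append(
            'alias(name = "{}_pkg", actual = "@{}//{}")'.format(
                dep.replace("-", "_"),
                rctx.attr.pip_repo_name,
                # Exact label layout inside the pip repo is an assumption.
                target.replace("-", "_"),
            ),
        )
    rctx.file("BUILD", "\n".join(build_lines))

py_deps_profile = repository_rule(
    implementation = _py_deps_profile_impl,
    attrs = {
        "deps_map": attr.string_list_dict(),
        "pip_repo_name": attr.string(),
        "requirements_in": attr.label(),
        "switch": attr.string_dict(),
    },
    environ = ["IS_NIGHTLY"],
)
```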
4 changes: 3 additions & 1 deletion oss_scripts/configure.sh
@@ -41,7 +41,7 @@ else
if [[ "$IS_NIGHTLY" == "nightly" ]]; then
pip install tf-nightly
else
pip install tensorflow==2.18.0
pip install tensorflow==2.20.0
fi
fi

@@ -85,3 +85,5 @@ if is_macos; then
fi

write_action_env_to_bazelrc "TF_CXX11_ABI_FLAG" ${TF_ABIFLAG}
write_to_bazelrc "build --define=TENSORFLOW_TEXT_BUILD_TFLITE_OPS=1"
write_to_bazelrc "build --define=with_tflite_ops=true"
2 changes: 0 additions & 2 deletions oss_scripts/pip_package/requirements.in
@@ -1,8 +1,6 @@
setuptools==70.0.0
dm-tree==0.1.8 # Limit for macos support.
numpy
protobuf==4.25.3 # b/397977335 - Fix crash on python 3.9, 3.10.
tensorflow
tf-keras
tensorflow-datasets
tensorflow-metadata
4 changes: 2 additions & 2 deletions oss_scripts/pip_package/setup.py
@@ -88,12 +88,12 @@ def finalize_options(self):
distclass=BinaryDistribution,
install_requires=[
(
'tensorflow>=2.18.0, <2.19',
'tensorflow==2.20',
),
],
extras_require={
'tensorflow_cpu': [
'tensorflow-cpu>=2.18.0, <2.19',
'tensorflow-cpu==2.20',
],
'tests': [
'absl-py',
5 changes: 5 additions & 0 deletions tensorflow_text/core/kernels/BUILD
@@ -337,6 +337,7 @@ cc_library(
deps = [
"@com_google_absl//absl/container:flat_hash_set",
"@com_google_absl//absl/status:statusor",
"@com_google_absl//absl/strings",
"@darts_clone",
],
)
@@ -871,8 +872,12 @@ tf_cc_library(
# tf:lib tensorflow dep,
],
deps = [
"@com_google_absl//absl/base:core_headers",
"@com_google_absl//absl/status",
"@com_google_absl//absl/status:statusor",
"@com_google_absl//absl/strings",
"@icu//:common",
"@icu//:nfkc",
],
)

1 change: 1 addition & 0 deletions tensorflow_text/core/kernels/darts_clone_trie_builder.cc
@@ -19,6 +19,7 @@
#include <numeric>

#include "absl/container/flat_hash_set.h"
#include "absl/strings/str_cat.h"
#include "include/darts.h"

namespace tensorflow {
48 changes: 1 addition & 47 deletions tensorflow_text/core/pybinds/BUILD
@@ -1,7 +1,7 @@
# Code that exposes C++ libraries to Python via pybind11.

# Placeholder: load py_test
load("@org_tensorflow//tensorflow:tensorflow.bzl", "pybind_extension")
load("//third_party/tensorflow/compiler/xla/tsl:tsl.google.bzl", pybind_extension = "tsl_pybind_extension")

licenses(["notice"])

@@ -66,19 +66,6 @@ pybind_extension(
],
)

py_test(
name = "pywrap_fast_bert_normalizer_model_builder_test",
srcs = ["pywrap_fast_bert_normalizer_model_builder_test.py"],
data = [
"//tensorflow_text:python/ops/test_data/fast_bert_normalizer_model.fb",
"//tensorflow_text:python/ops/test_data/fast_bert_normalizer_model_lower_case_nfd_strip_accents.fb",
],
deps = [
":pywrap_fast_bert_normalizer_model_builder",
"@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
],
)

pybind_extension(
name = "pywrap_fast_wordpiece_tokenizer_model_builder",
srcs = ["pywrap_fast_wordpiece_tokenizer_model_builder.cc"],
@@ -98,18 +85,6 @@ pybind_extension(
],
)

py_test(
name = "pywrap_fast_wordpiece_tokenizer_model_builder_test",
srcs = ["pywrap_fast_wordpiece_tokenizer_model_builder_test.py"],
data = [
"//tensorflow_text:python/ops/test_data/fast_wordpiece_tokenizer_model.fb",
],
deps = [
":pywrap_fast_wordpiece_tokenizer_model_builder",
"@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
],
)

pybind_extension(
name = "pywrap_phrase_tokenizer_model_builder",
srcs = ["pywrap_phrase_tokenizer_model_builder.cc"],
@@ -133,18 +108,6 @@ pybind_extension(
],
)

py_test(
name = "pywrap_phrase_tokenizer_model_builder_test",
srcs = ["pywrap_phrase_tokenizer_model_builder_test.py"],
data = [
"//tensorflow_text:python/ops/test_data/phrase_tokenizer_model_test.fb",
],
deps = [
":pywrap_phrase_tokenizer_model_builder",
"@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
],
)

pybind_extension(
name = "pywrap_model_converter",
srcs = ["pywrap_model_converter.cc"],
@@ -184,12 +147,3 @@ pybind_extension(
"@pybind11",
],
)

py_test(
name = "pywrap_whitespace_tokenizer_config_builder_test",
srcs = ["pywrap_whitespace_tokenizer_config_builder_test.py"],
deps = [
":pywrap_whitespace_tokenizer_config_builder",
"@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
],
)
10 changes: 10 additions & 0 deletions tensorflow_text/tftext.bzl
@@ -139,6 +139,16 @@ def tf_cc_library(
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/time")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/types:variant")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/functional:any_invocable")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/log:check")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/log:log")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/log:absl_check")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/log:absl_log")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/strings")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/container:btree")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/container:flat_hash_set")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/base")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/meta:type_traits")
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/utility:if_constexpr")
deps += select({
"@org_tensorflow//tensorflow:mobile": [
"@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",
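The new lines in `tf_cc_library` extend the existing `oss_deps = oss_deps + _dedupe(deps, ...)` pattern to cover more Abseil targets. `_dedupe` itself is defined elsewhere in `tensorflow_text/tftext.bzl` and does not appear in this diff; below is a minimal sketch of the assumed behavior, namely that it returns the dependency as a one-element list only when the caller has not already listed it.

```starlark
# Assumed shape of the helper used above; the real definition lives in
# tensorflow_text/tftext.bzl and is not part of this diff.
def _dedupe(deps, dep):
    """Returns [dep] if dep is not already in deps, else []."""
    if dep in deps:
        return []
    return [dep]

# Usage pattern from tf_cc_library: only append an Abseil target to the OSS
# dependency list when the caller did not already depend on it.
deps = ["@com_google_absl//absl/strings"]
oss_deps = []
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/strings")   # already present -> adds nothing
oss_deps = oss_deps + _dedupe(deps, "@com_google_absl//absl/log:check")  # missing -> appended
```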