
Commit a93de7d

cantonios authored and tf-text-github-robot committed

Add explicit tf-keras dependency.

PiperOrigin-RevId: 738408878

1 parent 608b4aa commit a93de7d

13 files changed (+772 -847 lines)
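The lock files and BUILD targets below now list tf-keras (the legacy Keras 2 package) explicitly instead of relying on it transitively. As a rough sketch of the pattern this dependency supports, assuming the standard tf-keras pip package and the TF_USE_LEGACY_KERAS opt-in, neither of which is shown in this diff:

# Minimal sketch, not code from this commit: use the legacy Keras 2 API via the
# tf-keras package. The environment variable must be set before TensorFlow is
# imported if tf.keras should resolve to the legacy implementation.
import os
os.environ.setdefault("TF_USE_LEGACY_KERAS", "1")

import tensorflow as tf  # release or nightly TensorFlow
import tf_keras          # provided by the explicit tf-keras dependency

model = tf_keras.Sequential([tf_keras.layers.Dense(1)])  # Keras 2 API
model.compile(optimizer="sgd", loss="mse")
print(tf.__version__, tf_keras.__version__)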

oss_scripts/pip_package/requirements_lock_3_10.txt

+191 -211

Large diffs are not rendered by default.

oss_scripts/pip_package/requirements_lock_3_11.txt

+192 -211

Large diffs are not rendered by default.

oss_scripts/pip_package/requirements_lock_3_12.txt

+191 -193

Large diffs are not rendered by default.

oss_scripts/pip_package/requirements_lock_3_9.txt

+171 -191

Large diffs are not rendered by default.

tensorflow_text/BUILD

+15

@@ -226,6 +226,7 @@ py_test(
     deps = [
         ":tensorflow_text",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/platform:client_testlib tensorflow dep,
     ],
 )
@@ -301,6 +302,7 @@ py_test(
         ":tensorflow_text",
         "@absl_py//absl/testing:parameterized",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/compat tensorflow dep,
         # python/data/kernel_tests:test_base tensorflow dep,
         # python/framework:dtypes tensorflow dep,
@@ -338,6 +340,7 @@ py_test(
         ":tensorflow_text",
         "@absl_py//absl/testing:parameterized",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:constant_op tensorflow dep,
         # python/framework:dtypes tensorflow dep,
         # python/framework:test_lib tensorflow dep,
@@ -382,6 +385,7 @@ py_test(
         "@absl_py//absl/logging",
         "@absl_py//absl/testing:parameterized",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
     ],
 )
 
@@ -418,6 +422,7 @@ py_test(
         ":tensorflow_text",
         "@absl_py//absl/testing:parameterized",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/compat tensorflow dep,
         # python/data/kernel_tests:test_base tensorflow dep,
         # python/framework:dtypes tensorflow dep,
@@ -689,6 +694,7 @@ py_test(
     deps = [
         ":tensorflow_text",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:constant_op tensorflow dep,
         # python/framework:errors tensorflow dep,
         # python/framework:test_lib tensorflow dep,
@@ -857,6 +863,7 @@ py_test(
     deps = [
         ":tensorflow_text",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:constant_op tensorflow dep,
         # python/framework:test_lib tensorflow dep,
         # python/ops/ragged:ragged_factory_ops tensorflow dep,
@@ -1109,6 +1116,7 @@ py_test(
         ":tensorflow_text",
         "@absl_py//absl/testing:parameterized",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:constant_op tensorflow dep,
         # python/framework:dtypes tensorflow dep,
         # python/framework:errors tensorflow dep,
@@ -1177,6 +1185,7 @@ py_library(
     srcs = ["python/keras/layers/todense.py"],
     deps = [
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
     ],
 )
 
@@ -1190,6 +1199,7 @@ py_test(
         "@absl_py//absl/testing:parameterized",
         "@pypi//numpy:pkg",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:test_lib tensorflow dep,
     ],
 )
@@ -1211,6 +1221,7 @@ py_library(
         ":whitespace_tokenizer_v2",
         ":wordpiece_tokenizer",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/ops:lookup_ops tensorflow dep,
         # python/ops/ragged:ragged_conversion_ops tensorflow dep,
     ],
@@ -1226,6 +1237,7 @@ py_test(
         "@absl_py//absl/testing:parameterized",
         "@pypi//numpy:pkg",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
     ],
 )
 
@@ -1259,6 +1271,7 @@ py_test(
         ":trimmer_ops",
         "@absl_py//absl/testing:parameterized",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:constant_op tensorflow dep,
         # python/framework:test_lib tensorflow dep,
         # python/ops/ragged:ragged_factory_ops tensorflow dep,
@@ -1358,6 +1371,7 @@ py_test(
         ":tensorflow_text",
         ":utf8_binarize_op",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:test_lib tensorflow dep,
         # python/platform:client_testlib tensorflow dep,
     ],
@@ -1460,6 +1474,7 @@ py_test(
     deps = [
         ":tensorflow_text",
         "@release_or_nightly//:tensorflow_pkg", # tensorflow package dep
+        "@release_or_nightly//:tf_keras_pkg",
         # python/framework:constant_op tensorflow dep,
         # python/framework:test_lib tensorflow dep,
         # python/ops/ragged:ragged_factory_ops tensorflow dep,
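For context on the py_library hunk at @@ -1177,6 +1185,7 @@: python/keras/layers/todense.py backs the ToDense layer, which the released package exposes as tensorflow_text.keras.layers.ToDense (an assumption here, not stated in the diff), so that library now depends on tf_keras_pkg directly. An illustrative use of the layer, assuming it accepts a RaggedTensor when called eagerly:

# Illustrative only, not from this commit: pad ragged, tokenizer-style output
# into a dense tensor with the ToDense Keras layer from tensorflow_text.
import tensorflow as tf
import tensorflow_text as tf_text

ragged = tf.ragged.constant([[1, 2, 3], [4, 5]])
dense = tf_text.keras.layers.ToDense(pad_value=0)(ragged)
print(dense)  # expected: [[1 2 3] [4 5 0]]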

tensorflow_text/core/kernels/fast_wordpiece_tokenizer_test.cc

+3 -3

@@ -29,7 +29,7 @@ using ::testing::AnyOf;
 using ::testing::ElementsAre;
 
 constexpr char kTestConfigPath[] =
-    "third_party/tensorflow_text/python/ops/test_data/"
+    "tensorflow_text/python/ops/test_data/"
     "fast_wordpiece_tokenizer_model.fb";
 
 TEST(FastWordpieceTokenizerTest, LoadAndTokenize) {
@@ -119,10 +119,10 @@ INSTANTIATE_TEST_SUITE_P(FastWordpieceTokenizerPunctuationTest,
     TestPunctuationVersionMismatch,
     testing::Values(
         // Unicode v 15.1 config
-        "third_party/tensorflow_text/python/ops/test_data/"
+        "tensorflow_text/python/ops/test_data/"
         "fast_wordpiece_tokenizer_model_ver_15_1.fb",
         // Unicode v 16.0 config
-        "third_party/tensorflow_text/python/ops/test_data/"
+        "tensorflow_text/python/ops/test_data/"
        "fast_wordpiece_tokenizer_model_ver_16_0.fb"));
 
 template <typename T>

tensorflow_text/core/kernels/phrase_tokenizer_test.cc

+1 -1

@@ -38,7 +38,7 @@ heard news today
 the news today
 */
 constexpr char kTestConfigPath[] =
-    "third_party/tensorflow_text/python/ops/test_data/"
+    "tensorflow_text/python/ops/test_data/"
     "phrase_tokenizer_model.fb";
 
 TEST(PhraseTokenizerTest, Tokenize) {

tensorflow_text/core/ops/fast_sentencepiece_ops.cc

-1

@@ -20,7 +20,6 @@
 namespace tensorflow {
 namespace text {
 
-// copied from third_party/tensorflow_text/core/ops/sentencepiece_ops.cc
 REGISTER_OP("TFText>FastSentencepieceTokenize")
     .Input("sp_model: uint8")
     .Input("input: string")

tensorflow_text/core/pybinds/pywrap_fast_bert_normalizer_model_builder_test.py

+2 -2

@@ -24,8 +24,8 @@
 from tensorflow.python.platform import test
 from tensorflow_text.core.pybinds import pywrap_fast_bert_normalizer_model_builder
 
-EXPECTED_MODEL_BUFFER_PATH = "third_party/tensorflow_text/python/ops/test_data/fast_bert_normalizer_model.fb"
-EXPECTED_MODEL_LOWER_CASE_NFD_STRIP_ACCENTS_BUFFER_PATH = "third_party/tensorflow_text/python/ops/test_data/fast_bert_normalizer_model_lower_case_nfd_strip_accents.fb"
+EXPECTED_MODEL_BUFFER_PATH = "tensorflow_text/python/ops/test_data/fast_bert_normalizer_model.fb"
+EXPECTED_MODEL_LOWER_CASE_NFD_STRIP_ACCENTS_BUFFER_PATH = "tensorflow_text/python/ops/test_data/fast_bert_normalizer_model_lower_case_nfd_strip_accents.fb"
 
 
 class PywrapCodepointWiseTextNormalizerModelBuilderTest(

tensorflow_text/core/pybinds/pywrap_fast_wordpiece_tokenizer_model_builder_test.py

+1 -1

@@ -24,7 +24,7 @@
 from tensorflow.python.platform import test
 from tensorflow_text.core.pybinds import pywrap_fast_wordpiece_tokenizer_model_builder
 
-EXPECTED_MODEL_BUFFER_PATH = "third_party/tensorflow_text/python/ops/test_data/fast_wordpiece_tokenizer_model.fb"
+EXPECTED_MODEL_BUFFER_PATH = "tensorflow_text/python/ops/test_data/fast_wordpiece_tokenizer_model.fb"
 
 
 class PywrapFastWordpieceBuilderTest(test_util.TensorFlowTestCase):

tensorflow_text/core/pybinds/pywrap_phrase_tokenizer_model_builder_test.py

+1 -1

@@ -24,7 +24,7 @@
 from tensorflow.python.platform import test
 from tensorflow_text.core.pybinds import pywrap_phrase_tokenizer_model_builder
 
-EXPECTED_MODEL_BUFFER_PATH = "third_party/tensorflow_text/python/ops/test_data/phrase_tokenizer_model_test.fb"
+EXPECTED_MODEL_BUFFER_PATH = "tensorflow_text/python/ops/test_data/phrase_tokenizer_model_test.fb"
 
 
 class PywrapPhraseBuilderTest(test_util.TensorFlowTestCase):

tensorflow_text/python/benchmarks/tokenizers_benchmarks.py

+4 -4

@@ -56,10 +56,10 @@
 
 # These are needed when generating the parameterized benchmarks and cannot use
 # absl FLAGS
-_BERT_VOCAB_PATH = "third_party/tensorflow_text/python/benchmarks/test_data/uncased_L-12_H-768_A-12/vocab.txt"
-_HUB_MODULE_HANDLE = "third_party/tensorflow_text/python/ops/test_data/segmenter_hub_module"
-_SENTENCEPIECE_MODEL_FILE = "third_party/tensorflow_text/python/ops/test_data/test_oss_model.model"
-_FAST_SENTENCEPIECE_MODEL_FILE = "third_party/tensorflow_text/python/ops/test_data/fast_sentencepiece.model"
+_BERT_VOCAB_PATH = "tensorflow_text/python/benchmarks/test_data/uncased_L-12_H-768_A-12/vocab.txt"
+_HUB_MODULE_HANDLE = "tensorflow_text/python/ops/test_data/segmenter_hub_module"
+_SENTENCEPIECE_MODEL_FILE = "tensorflow_text/python/ops/test_data/test_oss_model.model"
+_FAST_SENTENCEPIECE_MODEL_FILE = "tensorflow_text/python/ops/test_data/fast_sentencepiece.model"
 
 
 class TokenizationBenchmark(
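The comment in this hunk explains why the benchmark paths are module-level constants rather than absl flags: parameterized cases are generated at import time, before flags are parsed. A small sketch of that constraint, using a hypothetical test class (only the path constant is taken from this file):

# Sketch, not code from this commit: values used to generate parameterized
# cases are evaluated when the class body is defined (at import time), so they
# cannot come from absl FLAGS, which are only parsed later inside main().
from absl.testing import absltest
from absl.testing import parameterized

_MODEL_PATH = "tensorflow_text/python/ops/test_data/test_oss_model.model"  # import-time constant


class ExampleParameterizedCase(parameterized.TestCase):

  @parameterized.parameters((_MODEL_PATH,))  # evaluated at class definition
  def test_path_suffix(self, path):
    self.assertTrue(path.endswith(".model"))


if __name__ == "__main__":
  absltest.main()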

tensorflow_text/python/ops/test_data/fast_wordpiece_README.google.txt

-28
This file was deleted.
