"Fossies" - the Fresh Open Source Software Archive 
Member "qt-everywhere-src-6.3.1/qtwebengine/src/3rdparty/chromium/third_party/tflite-support/patches/0001-no-sentencepiece-tokenizer.patch" (8 Jun 2022, 2253 Bytes) of package /linux/misc/qt-everywhere-src-6.3.1.tar.xz:
From 7faac3ddcbc05275d797dda64a9b9d7f2279ae1c Mon Sep 17 00:00:00 2001
From: Sophie Chang <sophiechang@chromium.org>
Date: Thu, 11 Feb 2021 00:53:47 +0000
Subject: [PATCH] no sentencepiece tokenizer

---
 .../cc/text/tokenizers/tokenizer_utils.cc | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/third_party/tflite-support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc b/third_party/tflite-support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc
index 352c4a8c5e4f..46786fd7faf8 100644
--- a/third_party/tflite-support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc
+++ b/third_party/tflite-support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc
@@ -20,7 +20,6 @@ limitations under the License.
 #include "tensorflow_lite_support/cc/port/status_macros.h"
 #include "tensorflow_lite_support/cc/text/tokenizers/bert_tokenizer.h"
 #include "tensorflow_lite_support/cc/text/tokenizers/regex_tokenizer.h"
-#include "tensorflow_lite_support/cc/text/tokenizers/sentencepiece_tokenizer.h"
 #include "tensorflow_lite_support/metadata/metadata_schema_generated.h"
 
 namespace tflite {
@@ -73,16 +72,6 @@ StatusOr<std::unique_ptr<Tokenizer>> CreateTokenizerFromProcessUnit(
       return absl::make_unique<BertTokenizer>(vocab_buffer.data(),
                                               vocab_buffer.size());
     }
-    case ProcessUnitOptions_SentencePieceTokenizerOptions: {
-      const tflite::SentencePieceTokenizerOptions* options =
-          tokenizer_process_unit->options_as<SentencePieceTokenizerOptions>();
-      ASSIGN_OR_RETURN(absl::string_view model_buffer,
-                       CheckAndLoadFirstAssociatedFile(
-                           options->sentencePiece_model(), metadata_extractor));
-      // TODO(b/160647204): Extract sentence piece model vocabulary
-      return absl::make_unique<SentencePieceTokenizer>(model_buffer.data(),
-                                                       model_buffer.size());
-    }
     case ProcessUnitOptions_RegexTokenizerOptions: {
       const tflite::RegexTokenizerOptions* options =
           tokenizer_process_unit->options_as<RegexTokenizerOptions>();
-- 
2.30.0.478.g8a0d178c01-goog