Add adaptation for the surround reconnaissance scenario

2026-01-08 15:44:38 +08:00
parent 3eba1f962b
commit 10c5bb5a8a
5441 changed files with 40219 additions and 379695 deletions

@@ -6,6 +6,14 @@ class PostProcessor:
This class is not supposed to be instantiated directly. Instead, any implementation of
a PostProcessor will return an instance of this class when instantiated.
"""
def __getstate__(self):
""" """
pass
def __setstate__(self, state):
""" """
pass
def num_special_tokens_to_add(self, is_pair):
"""
Return the number of special tokens that would be added for single/pair sentences.
@@ -56,6 +64,28 @@ class BertProcessing(PostProcessor):
def __init__(self, sep, cls):
pass
def __getnewargs__(self):
""" """
pass
def __getstate__(self):
""" """
pass
def __setstate__(self, state):
""" """
pass
@property
def cls(self):
""" """
pass
@cls.setter
def cls(self, value):
""" """
pass
def num_special_tokens_to_add(self, is_pair):
"""
Return the number of special tokens that would be added for single/pair sentences.
@@ -88,6 +118,16 @@ class BertProcessing(PostProcessor):
"""
pass
@property
def sep(self):
""" """
pass
@sep.setter
def sep(self, value):
""" """
pass
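
# Illustrative usage sketch (not part of the stub; token ids 102/101 are placeholders,
# real code would look them up with token_to_id). It shows that a concrete implementation
# such as BertProcessing is itself a PostProcessor instance, as the base class docstring says.
from tokenizers.processors import BertProcessing, PostProcessor

bert_post = BertProcessing(("[SEP]", 102), ("[CLS]", 101))
assert isinstance(bert_post, PostProcessor)
print(bert_post.num_special_tokens_to_add(False))  # 2 -> [CLS] A [SEP]
print(bert_post.num_special_tokens_to_add(True))   # 3 -> [CLS] A [SEP] B [SEP]
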
class ByteLevel(PostProcessor):
"""
This post-processor takes care of trimming the offsets.
@@ -98,8 +138,31 @@ class ByteLevel(PostProcessor):
Args:
trim_offsets (:obj:`bool`):
Whether to trim the whitespaces from the produced offsets.
add_prefix_space (:obj:`bool`, `optional`, defaults to :obj:`True`):
If :obj:`True`, keeps the first token's offset as is. If :obj:`False`, increments
the start of the first token's offset by 1. Only has an effect if :obj:`trim_offsets`
is set to :obj:`True`.
"""
-def __init__(self, trim_offsets=True):
+def __init__(self, add_prefix_space=None, trim_offsets=None, use_regex=None):
pass
def __getstate__(self):
""" """
pass
def __setstate__(self, state):
""" """
pass
@property
def add_prefix_space(self):
""" """
pass
@add_prefix_space.setter
def add_prefix_space(self, value):
""" """
pass
def num_special_tokens_to_add(self, is_pair):
@@ -134,6 +197,26 @@ class ByteLevel(PostProcessor):
"""
pass
@property
def trim_offsets(self):
""" """
pass
@trim_offsets.setter
def trim_offsets(self, value):
""" """
pass
@property
def use_regex(self):
""" """
pass
@use_regex.setter
def use_regex(self, value):
""" """
pass
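
# Illustrative usage sketch: the ByteLevel post-processor is typically attached to a
# byte-level BPE tokenizer so that offsets stop including the whitespace merged into
# tokens; the values passed here are examples, not defaults mandated by the stub.
from tokenizers import Tokenizer, pre_tokenizers, processors
from tokenizers.models import BPE

tok = Tokenizer(BPE())
tok.pre_tokenizer = pre_tokenizers.ByteLevel(add_prefix_space=False)
tok.post_processor = processors.ByteLevel(trim_offsets=True)
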
class RobertaProcessing(PostProcessor):
"""
This post-processor takes care of adding the special tokens needed by
@@ -164,6 +247,38 @@ class RobertaProcessing(PostProcessor):
def __init__(self, sep, cls, trim_offsets=True, add_prefix_space=True):
pass
def __getnewargs__(self):
""" """
pass
def __getstate__(self):
""" """
pass
def __setstate__(self, state):
""" """
pass
@property
def add_prefix_space(self):
""" """
pass
@add_prefix_space.setter
def add_prefix_space(self, value):
""" """
pass
@property
def cls(self):
""" """
pass
@cls.setter
def cls(self, value):
""" """
pass
def num_special_tokens_to_add(self, is_pair):
"""
Return the number of special tokens that would be added for single/pair sentences.
@@ -196,6 +311,26 @@ class RobertaProcessing(PostProcessor):
"""
pass
@property
def sep(self):
""" """
pass
@sep.setter
def sep(self, value):
""" """
pass
@property
def trim_offsets(self):
""" """
pass
@trim_offsets.setter
def trim_offsets(self, value):
""" """
pass
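
# Illustrative usage sketch: RobertaProcessing wires the RoBERTa special tokens
# "<s>"/"</s>" (ids 0 and 2 in the canonical vocab, used here as placeholders) and
# exposes the trim_offsets/add_prefix_space knobs documented above.
from tokenizers.processors import RobertaProcessing

roberta_post = RobertaProcessing(("</s>", 2), ("<s>", 0), trim_offsets=True, add_prefix_space=True)
print(roberta_post.num_special_tokens_to_add(False))  # 2 -> <s> A </s>
print(roberta_post.num_special_tokens_to_add(True))   # 4 -> <s> A </s></s> B </s>
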
class Sequence(PostProcessor):
"""
Sequence Processor
@@ -207,6 +342,30 @@ class Sequence(PostProcessor):
def __init__(self, processors):
pass
def __getitem__(self, key):
"""
Return self[key].
"""
pass
def __getnewargs__(self):
""" """
pass
def __getstate__(self):
""" """
pass
def __setitem__(self, key, value):
"""
Set self[key] to value.
"""
pass
def __setstate__(self, state):
""" """
pass
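
# Illustrative usage sketch: Sequence chains several post-processors, and per the
# __getitem__/__setitem__ stubs above the contained processors can be read back by index.
# The template and ids below are placeholders.
from tokenizers import processors

seq = processors.Sequence([
    processors.ByteLevel(trim_offsets=False),
    processors.TemplateProcessing(
        single="$A </s>",
        pair="$A </s> $B </s>",
        special_tokens=[("</s>", 2)],
    ),
])
first = seq[0]  # the ByteLevel processor
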
def num_special_tokens_to_add(self, is_pair):
"""
Return the number of special tokens that would be added for single/pair sentences.
@@ -306,7 +465,15 @@ class TemplateProcessing(PostProcessor):
The given dict expects the provided :obj:`ids` and :obj:`tokens` lists to have
the same length.
"""
-def __init__(self, single, pair, special_tokens):
+def __init__(self, single=None, pair=None, special_tokens=None):
pass
def __getstate__(self):
""" """
pass
def __setstate__(self, state):
""" """
pass
def num_special_tokens_to_add(self, is_pair):
@@ -340,3 +507,13 @@ class TemplateProcessing(PostProcessor):
:class:`~tokenizers.Encoding`: The final encoding
"""
pass
@property
def single(self):
""" """
pass
@single.setter
def single(self, value):
""" """
pass
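
# Illustrative usage sketch: a BERT-style template with placeholder ids for
# [CLS]/[SEP]; real code would look them up with token_to_id(). The special_tokens
# entries must match the tokens referenced in the single/pair templates.
from tokenizers.processors import TemplateProcessing

template_post = TemplateProcessing(
    single="[CLS] $A [SEP]",
    pair="[CLS] $A [SEP] $B:1 [SEP]:1",
    special_tokens=[("[CLS]", 1), ("[SEP]", 2)],
)
print(template_post.num_special_tokens_to_add(True))  # 3 -> [CLS], [SEP], [SEP]
# The single/pair templates can be read back or reassigned via the properties above.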