You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
0%| | 0/1000 [00:00<?, ?it/s]C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\transformers\tokenization_utils_base.py:1770: FutureWarning: The pad_to_max_length argument is deprecated and will be removed in a future version, use padding=True or padding='longest' to pad to the longest sequence in the batch, or use padding='max_length' to pad to a max length. In this case, you can give a specific length with max_length (e.g. max_length=45) or leave max_length to None to pad to the maximal input size of the model (e.g. 512 for Bert).
FutureWarning,
0%| | 0/1000 [00:01<?, ?it/s]
Traceback (most recent call last):
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\data_manager.py", line 73, in load
cls.data_path[data_name]
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\data\nltk_senttokenizer.py", line 17, in LOAD
return __import__("nltk").data.load(os.path.join(path, "english.pickle")).tokenize
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\nltk\data.py", line 752, in load
opened_resource = _open(resource_url)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\nltk\data.py", line 882, in _open
return urlopen(resource_url)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 222, in urlopen
return opener.open(url, data, timeout)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 525, in open
response = self._open(req, data)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 548, in _open
'unknown_open', req)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 503, in _call_chain
result = func(*args)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 1389, in unknown_open
raise URLError('unknown url type: %s' % type)
urllib.error.URLError: &lt;urlopen error unknown url type: 'e'&gt;
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "e:\ResearchTraining\openAttack\test.py", line 11, in &lt;module&gt;
attack_eval.eval(dataset, visualize=False)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\attack_evals\default.py", line 96, in eval
for data, x_adv, y_adv, info in (tqdm(self.eval_results(dataset), total=total_len) if self.__progress_bar else self.eval_results(dataset)):
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\tqdm\std.py", line 1129, in __iter__
for obj in iterable:
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\attack_evals\default.py", line 151, in eval_results
res = self.attacker(self.classifier, data.x, data.target)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\attackers\genetic.py", line 86, in __call__
x_orig = self.config["processor"].get_tokens(x_orig)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\text_processors\default.py", line 37, in get_tokens
self.__tokenize = self.__make_tokenizer( DataManager.load("TProcess.NLTKSentTokenizer") )
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\data_manager.py", line 76, in load
raise DataNotExistException(data_name, cls.data_path[data_name])
OpenAttack.exceptions.data_manager.DataNotExistException: ('TProcess.NLTKSentTokenizer', 'E:\ResearchTraining\openAttack\data\TProcess.NLTKSentTokenizer')
Sorry, but I don't know how to solve this problem. Please help me. Thank you!
The text was updated successfully, but these errors were encountered:
I tried to start the demo code in readme.md, but it seems that some exceptions have happened....
The code is shown as below:
# Demo script from the OpenAttack README: evaluate the Genetic attacker
# against a BERT victim model on an SST sample dataset.
# NOTE(review): the original paste lost its '#' comment markers, which made
# the snippet invalid Python; they are restored here.
import OpenAttack as oa

# choose a trained victim classification model
victim = oa.DataManager.load("Victim.BERT.SST")
# choose an evaluation dataset
dataset = oa.DataManager.load("Dataset.SST.sample")
# choose Genetic as the attacker and initialize it with default parameters
attacker = oa.attackers.GeneticAttacker()
# prepare for attacking
attack_eval = oa.attack_evals.DefaultAttackEval(attacker, victim)
# launch attacks and print attack results
attack_eval.eval(dataset, visualize=False)
And the exceptions are as below:
0%| | 0/1000 [00:00&lt;?, ?it/s]C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\transformers\tokenization_utils_base.py:1770: FutureWarning: The `pad_to_max_length` argument is deprecated and will be removed in a future version, use `padding=True` or `padding='longest'` to pad to the longest sequence in the batch, or use `padding='max_length'` to pad to a max length. In this case, you can give a specific length with `max_length` (e.g. `max_length=45`) or leave max_length to None to pad to the maximal input size of the model (e.g. 512 for Bert).
FutureWarning,
0%| | 0/1000 [00:01<?, ?it/s]
Traceback (most recent call last):
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\data_manager.py", line 73, in load
cls.data_path[data_name]
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\data\nltk_senttokenizer.py", line 17, in LOAD
return __import__("nltk").data.load(os.path.join(path, "english.pickle")).tokenize
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\nltk\data.py", line 752, in load
opened_resource = _open(resource_url)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\nltk\data.py", line 882, in _open
return urlopen(resource_url)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 222, in urlopen
return opener.open(url, data, timeout)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 525, in open
response = self._open(req, data)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 548, in _open
'unknown_open', req)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 503, in _call_chain
result = func(*args)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\urllib\request.py", line 1389, in unknown_open
raise URLError('unknown url type: %s' % type)
urllib.error.URLError: &lt;urlopen error unknown url type: 'e'&gt;
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "e:\ResearchTraining\openAttack\test.py", line 11, in &lt;module&gt;
attack_eval.eval(dataset, visualize=False)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\attack_evals\default.py", line 96, in eval
for data, x_adv, y_adv, info in (tqdm(self.eval_results(dataset), total=total_len) if self.__progress_bar else self.eval_results(dataset)):
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\tqdm\std.py", line 1129, in __iter__
for obj in iterable:
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\attack_evals\default.py", line 151, in eval_results
res = self.attacker(self.classifier, data.x, data.target)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\attackers\genetic.py", line 86, in __call__
x_orig = self.config["processor"].get_tokens(x_orig)
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\text_processors\default.py", line 37, in get_tokens
self.__tokenize = self.__make_tokenizer( DataManager.load("TProcess.NLTKSentTokenizer") )
File "C:\Users\13189\Anaconda3\envs\Envtextworld\lib\site-packages\OpenAttack\data_manager.py", line 76, in load
raise DataNotExistException(data_name, cls.data_path[data_name])
OpenAttack.exceptions.data_manager.DataNotExistException: ('TProcess.NLTKSentTokenizer', 'E:\ResearchTraining\openAttack\data\TProcess.NLTKSentTokenizer')
Sorry, but I don't know how to solve this problem. Please help me. Thank you!
The text was updated successfully, but these errors were encountered: