-
Notifications
You must be signed in to change notification settings - Fork 124
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #118 from thunlp/dev
Dev
- Loading branch information
Showing
35 changed files
with
879 additions
and
35 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
# Build the sdist/wheel and publish to PyPI.
# Fix: the workflow previously triggered ONLY on pushes to `master`, while the
# publish step requires `github.ref` to start with `refs/tags` — a condition a
# branch push can never satisfy, so nothing was ever published. Tag pushes are
# now also a trigger; branch pushes still run the build as a smoke test.
name: publish to pypi
on:
  push:
    branches:
      - master
    tags:
      - 'v*'
jobs:
  build-n-publish:
    name: Build and publish
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master
      - uses: actions/setup-python@v2
        with:
          python-version: '3.7'
          architecture: 'x64'
      - name: Run build script
        # `wheel` is required by `bdist_wheel`; the previously-installed
        # `twine` was never used (the pypa action below does the upload).
        run: |
          python -m pip install --user --upgrade setuptools wheel
          python setup.py sdist bdist_wheel
      - name: Publish distribution 📦 to PyPI
        # Only tag pushes reach PyPI; plain branch pushes stop after building.
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
        uses: pypa/gh-action-pypi-publish@master
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
""" | ||
:type: a tuple of three :py:class:`.Dataset` s, `(train, valid, test)`. | ||
:Size: 31.0MB | ||
AG News dataset which is used to train victim models. | ||
""" | ||
import pickle | ||
|
||
NAME = "Dataset.AG" | ||
DOWNLOAD = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/dataset/sst.pkl" | ||
|
||
|
||
def LOAD(path): | ||
from OpenAttack.utils import Dataset, DataInstance | ||
|
||
def mapping(data): | ||
return Dataset([ | ||
DataInstance( | ||
x=it[0], | ||
y=it[1] | ||
) for it in data | ||
], copy=False) | ||
|
||
train, valid, test = pickle.load(open(path, "rb")) | ||
return mapping(train), mapping(valid), mapping(test) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
""" | ||
:type: a tuple of three :py:class:`.Dataset` s, `(train, valid, test)`. | ||
:Size: 56.2MB | ||
IMDB dataset which is used to train victim models. | ||
""" | ||
import pickle | ||
|
||
NAME = "Dataset.IMDB" | ||
DOWNLOAD = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/dataset/imdb.pkl" | ||
|
||
|
||
def LOAD(path): | ||
from OpenAttack.utils import Dataset, DataInstance | ||
|
||
def mapping(data): | ||
return Dataset([ | ||
DataInstance( | ||
x=it[0], | ||
y=it[1] | ||
) for it in data | ||
], copy=False) | ||
|
||
train, valid, test = pickle.load(open(path, "rb")) | ||
return mapping(train), mapping(valid), mapping(test) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
""" | ||
:type: a tuple of three :py:class:`.Dataset` s, `(train, valid, test)`. | ||
:Size: 77.373MB | ||
MNLI dataset which is used to train victim models. | ||
""" | ||
import pickle | ||
|
||
NAME = "Dataset.MNLI" | ||
DOWNLOAD = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/dataset/mnli.pkl" | ||
|
||
|
||
def LOAD(path): | ||
from OpenAttack.utils import Dataset, DataInstance | ||
|
||
def mapping(data): | ||
return Dataset([ | ||
DataInstance( | ||
x=it[0], | ||
y=it[2], | ||
meta= { "reference": it[1] } | ||
) for it in data | ||
], copy=False) | ||
|
||
train, valid, test = pickle.load(open(path, "rb")) | ||
return mapping(train), mapping(valid), mapping(test) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
""" | ||
:type: a tuple of three :py:class:`.Dataset` s, `(train, valid, test)`. | ||
:Size: 72.596MB | ||
SNLI dataset which is used to train victim models. | ||
""" | ||
import pickle | ||
|
||
NAME = "Dataset.SNLI" | ||
DOWNLOAD = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/dataset/snli.pkl" | ||
|
||
|
||
def LOAD(path): | ||
from OpenAttack.utils import Dataset, DataInstance | ||
|
||
def mapping(data): | ||
return Dataset([ | ||
DataInstance( | ||
x=it[0], | ||
y=it[2], | ||
meta= { "reference": it[1] } | ||
) for it in data | ||
], copy=False) | ||
|
||
train, valid, test = pickle.load(open(path, "rb")) | ||
return mapping(train), mapping(valid), mapping(test) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.AlbertClassifier | ||
:Size: 788.697MB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained ALBERT model on AG-4 dataset. See :py:data:`Dataset.AG` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, AlbertClassifier | ||
|
||
NAME = "Victim.ALBERT.AG" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/albert_ag.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return AlbertClassifier(path, 5) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.AlbertClassifier | ||
:Size: 788.662MB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained ALBERT model on IMDB dataset. See :py:data:`Dataset.IMDB` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, AlbertClassifier | ||
|
||
NAME = "Victim.ALBERT.IMDB" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/albert_imdb.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return AlbertClassifier(path, 2) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.AlbertClassifier | ||
:Size: 788.668MB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained ALBERT model on MNLI dataset. See :py:data:`Dataset.MNLI` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, AlbertClassifier | ||
|
||
NAME = "Victim.ALBERT.MNLI" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/albert_mnli.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return AlbertClassifier(path, 2) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.AlbertClassifier | ||
:Size: 788.672MB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained ALBERT model on SNLI dataset. See :py:data:`Dataset.SNLI` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, AlbertClassifier | ||
|
||
NAME = "Victim.ALBERT.SNLI" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/albert_snli.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return AlbertClassifier(path, 3) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.AlbertClassifier | ||
:Size: 788.66MB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained ALBERT model on SST-2 dataset. See :py:data:`Dataset.SST` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, AlbertClassifier | ||
|
||
NAME = "Victim.ALBERT.SST" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/albert_sst.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return AlbertClassifier(path, 2) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.BertClassifier | ||
:Size: 1.23GB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained BERT model on MNLI dataset. See :py:data:`Dataset.MNLI` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, BertClassifier | ||
|
||
NAME = "Victim.BERT.MNLI" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/bert_mnli.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return BertClassifier(path, 2) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
""" | ||
:type: OpenAttack.utils.BertClassifier | ||
:Size: 1.23GB | ||
:Package Requirements: | ||
* transformers | ||
* pytorch | ||
Pretrained BERT model on SNLI dataset. See :py:data:`Dataset.SNLI` for detail. | ||
""" | ||
|
||
from OpenAttack.utils import make_zip_downloader, BertClassifier | ||
|
||
NAME = "Victim.BERT.SNLI" | ||
|
||
URL = "https://thunlp.oss-cn-qingdao.aliyuncs.com/TAADToolbox/victim/bert_snli.zip" | ||
DOWNLOAD = make_zip_downloader(URL) | ||
|
||
def LOAD(path): | ||
from OpenAttack import Classifier | ||
return BertClassifier(path, 3) |
Oops, something went wrong.