master
Bel LaPointe 2022-08-08 16:51:06 -06:00
parent 6aacd00105
commit bbea7d8df5
4 changed files with 83 additions and 0 deletions

3
.gitignore vendored

@@ -1 +1,4 @@
**/*.sw*
/tabtab
/gpt2-large
/models

28
download.py Normal file

@@ -0,0 +1,28 @@
import os
import sys
import requests
from tqdm import tqdm

if len(sys.argv) != 2:
    print('You must enter the model name as a parameter, e.g.: download.py 124M')
    sys.exit(1)

model = sys.argv[1]

subdir = os.path.join('models', model)
if not os.path.exists(subdir):
    os.makedirs(subdir)
subdir = subdir.replace('\\','/') # needed for Windows

for filename in ['checkpoint','encoder.json','hparams.json','model.ckpt.data-00000-of-00001', 'model.ckpt.index', 'model.ckpt.meta', 'vocab.bpe']:
    r = requests.get("https://openaipublic.blob.core.windows.net/gpt-2/" + subdir + "/" + filename, stream=True)
    with open(os.path.join(subdir, filename), 'wb') as f:
        file_size = int(r.headers["content-length"])
        chunk_size = 1000
        with tqdm(ncols=100, desc="Fetching " + filename, total=file_size, unit_scale=True) as pbar:
            # 1k for chunk_size, since Ethernet packet size is around 1500 bytes
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
                pbar.update(chunk_size)
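
For reference, a minimal sketch of invoking the new script by hand, assuming Python 3 with requests and tqdm available; the 124M model name is the one run.sh below passes:

pip3 install requests tqdm
python3 download.py 124M    # weights end up under ./models/124M/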

47
run.sh Normal file

@@ -0,0 +1,47 @@
#! /bin/bash
set -e
set -o pipefail

# keep the cloned repo out of version control
if ! grep tabtab .gitignore; then
    echo >> .gitignore
    echo tabtab >> .gitignore
fi

# build the UI image from the ainize-team/tabtab repo
image_ui=bel/tabtab:ui-$(date +%Y%m%d)
if ! docker images | grep ${image_ui##*:}; then
    if ! [ -d ./tabtab ]; then
        git clone https://github.com/ainize-team/tabtab.git
    fi
    pushd tabtab
    cp ../tabtab.requirements.txt ./requirements.txt
    docker build -t $image_ui .
    popd
fi

# fetch the 124M GPT-2 weights on first run
if ! [ -d ./models ]; then
    mkdir ./models
    python3 -c 'from tqdm import tqdm' &> /dev/null || pip3 install tqdm
    python3 ./download.py 124M
fi

# build the model-server image
image_server=${image_ui//:ui/:server}
if ! docker images | grep ${image_server##*:}; then
    if ! [ -d ./gpt2-large ]; then
        git clone https://github.com/Henriquepheak/gpt2-large.git
    fi
    pushd ./gpt2-large
    docker build -t $image_server .
    popd
fi

# remove all containers when the script exits
cleanup() {
    docker rm -f $(docker ps -aq)
}
trap cleanup EXIT

# LAN address the UI container uses to reach the server container
localhost=$(ifconfig | grep -o '192.168[^ ]*' | head -n 1)
docker run -p 12313:80 --rm -d --name tabtab-server $image_server
docker run -p 12314:80 --rm -d --name tabtab-ui -e GPT2_SERVER_URL=http://$localhost:12313 $image_ui
# follow the UI container by name (docker logs expects a container, not an image tag)
docker logs -f tabtab-ui
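
A rough usage sketch for the script, assuming Docker, git, and Python 3 are already on the host; the ports are the ones hard-coded above:

bash run.sh
# the GPT-2 server is published on host port 12313, the tabtab UI on 12314
curl http://localhost:12314/    # assumes the UI answers plain HTTP on its mapped port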

5
tabtab.requirements.txt Normal file

@@ -0,0 +1,5 @@
Flask==1.1.4
requests==2.24.0
transformers
#Jinja2==3.0.3
markupsafe==2.0.1