File: quantization-results.sh

#!/usr/bin/env bash
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#

# This script applies quantization to the models from Table 1 in:
# Bag of Tricks for Efficient Text Classification, arXiv 1607.01759, 2016

set -e

DATASET=(
  ag_news
  sogou_news
  dbpedia
  yelp_review_polarity
  yelp_review_full
  yahoo_answers
  amazon_review_full
  amazon_review_polarity
)

# These learning rates were chosen by validation on a subset of the training set.
LR=( 0.25 0.5 0.5 0.1 0.1 0.1 0.05 0.05 )

RESULTDIR=result
DATADIR=data

echo 'Warning! Make sure you have run the classification-results.sh script before this one:'
echo 'it trains the models that the commands below quantize, so they will fail without it.'
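
# Not part of the original script: a minimal early sanity check (a sketch,
# assuming classification-results.sh left one trained ${RESULTDIR}/<dataset>.bin
# per dataset, which is the model file that `fasttext quantize` loads).
# Failing here gives a clear message instead of an error partway through the loop.
for name in "${DATASET[@]}"; do
  if [ ! -f "${RESULTDIR}/${name}.bin" ]; then
    echo "Missing ${RESULTDIR}/${name}.bin -- run classification-results.sh first" >&2
    exit 1
  fi
done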

for i in "${!DATASET[@]}"
do
  echo "Working on dataset ${DATASET[i]}"
  # Quantize the model trained by classification-results.sh: reads
  # ${RESULTDIR}/${DATASET[i]}.bin and writes ${RESULTDIR}/${DATASET[i]}.ftz.
  # -qnorm quantizes the vector norms separately, -cutoff prunes the
  # vocabulary to the 100,000 most important features, and -retrain
  # fine-tunes the pruned embeddings for 5 epochs on the training data.
  ../../fasttext quantize -input "${DATADIR}/${DATASET[i]}.train" \
    -output "${RESULTDIR}/${DATASET[i]}" -lr "${LR[i]}" \
    -thread 4 -qnorm -retrain -epoch 5 -cutoff 100000 > /dev/null
  # Report the quantized model's accuracy on the held-out test set.
  ../../fasttext test "${RESULTDIR}/${DATASET[i]}.ftz" \
    "${DATADIR}/${DATASET[i]}.test"
done
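
# Example run (a sketch; the ../../fasttext invocations above assume this
# script sits two directory levels below the fastText binary):
#   ./classification-results.sh   # train the full-precision .bin models first
#   ./quantization-results.sh
# For each dataset, `fasttext test` prints the example count (N) along with
# P@1 and R@1, which can be compared against the unquantized accuracies
# reported in Table 1 of the paper.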