[GENERAL]
root.dir=/var/lib/biomaj
conf.dir=%(root.dir)s/conf
log.dir=%(root.dir)s/log
process.dir=%(root.dir)s/process
cache.dir=%(root.dir)s/cache
lock.dir=%(root.dir)s/lock
#The root directory where all databases are stored.
#If your data is not stored under one directory hierarchy,
#you can override this value in the database properties file.
data.dir=%(root.dir)s/data
db.url=mongodb://localhost:27017
db.name=biomaj
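# Example with authentication (illustrative values; db.url takes a standard
# MongoDB connection string):
# db.url=mongodb://biomaj:secret@mongo.example.org:27017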
use_ldap=0
ldap.host=localhost
ldap.port=389
ldap.dn=nodomain
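# Example base DN (illustrative):
# ldap.dn=dc=example,dc=org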
use_elastic=0
#Comma-separated list of Elasticsearch nodes, e.g. host1,host2:port2
elastic_nodes=localhost
elastic_index=biomaj
# Calculate data.dir size stats
data.stats=1
redis.host=localhost
redis.port=6379
redis.db=0
redis.prefix=biomaj
# options to send to downloaders (protocol dependent)
# options.names=option1,option2
# options.option1=value1
# options.option2=value2
#
# Example supported options for ftp/http
# tcp_keepalive: number of seconds for curl keep alive (maps to curl option TCP_KEEPALIVE, TCP_KEEPINTVL and TCP_KEEPIDLE)
# ssl_verifyhost: boolean, checks or skips SSL host verification (maps to curl option SSL_VERIFYHOST)
# ssl_verifypeer: boolean, checks or skips SSL peer verification (maps to curl option SSL_VERIFYPEER)
# ssl_server_cert: path to certificate (maps to curl option CAINFO).
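# Example combining the options documented above (illustrative values):
# options.names=tcp_keepalive,ssl_verifypeer
# options.tcp_keepalive=120
# options.ssl_verifypeer=0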
# Influxdb configuration (optional)
# User and db must be manually created in influxdb before use
influxdb.host=
influxdb.port=8086
influxdb.user=root
influxdb.password=root
influxdb.db=biomaj
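# Example setup, assuming InfluxDB 1.x and its influx CLI (user name and
# password are illustrative):
#   influx -execute "CREATE DATABASE biomaj"
#   influx -execute "CREATE USER root WITH PASSWORD 'root' WITH ALL PRIVILEGES"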
# Needed for the remote download service (should point to a load balancer in front of the services)
#micro.biomaj.proxy=http://127.0.0.1:5000
#micro.biomaj.rabbit_mq=127.0.0.1
#micro.biomaj.rabbit_mq_port=5672
#micro.biomaj.rabbit_mq_user=
#micro.biomaj.rabbit_mq_password=
#micro.biomaj.rabbit_mq_virtualhost=
#micro.biomaj.service.download=1
#micro.biomaj.service.process=1
#micro.biomaj.service.user=1
#micro.biomaj.service.daemon=1
## Optional
# micro.biomaj.proxy.[user,cron,release,daemon,download,process]=http://127.0.0.1:5000
auto_publish=1
########################
# Global properties file
#To override these settings for a specific database, go to its
#properties file and uncomment or add the specific line you want
#to override.
#----------------
# Mail Configuration
#---------------
#Uncomment these lines if you want to receive mail when the workflow is finished
mail.smtp.host=
mail.admin=
mail.from=
mail.user=
mail.password=
mail.tls=true
mail.body.tail=2000000
mail.body.attach=9000000
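# Example values (illustrative):
# mail.smtp.host=smtp.example.org
# mail.admin=admin@example.org
# mail.from=biomaj@example.org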
#---------------------
#Proxy authentication
#---------------------
#proxyHost=
#proxyPort=
#proxyUser=
#proxyPassword=
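# Example (illustrative values):
#proxyHost=proxy.example.org
#proxyPort=3128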
#---------------------
# PROTOCOL
#-------------------
#possible values : ftp, http, rsync, local
port=21
username=anonymous
password=anonymous@nowhere.com
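# Example per-bank override, placed in the bank's own properties file
# (server and remote path are illustrative):
# protocol=ftp
# server=ftp.ncbi.nlm.nih.gov
# remote.dir=/blast/db/FASTA/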
#Access permissions for production directories
production.directory.chmod=775
#Number of threads used during the download
bank.num.threads=4
#Number of threads to use for downloading and processing
files.num.threads=4
#Timeout for a download, in seconds (applies to each file, not to the download as a whole)
timeout.download=3600
#To keep more than one release, increase this value
keep.old.version=0
#Link copy property
do.link.copy=true
#The historic log file is generated in log/
#Define the output log level: DEBUG, INFO, WARN, ERR
historic.logfile.level=DEBUG
# Hint: you can use online service https://regex101.com/ to test your regexps
http.parse.dir.line=<a[\\s]+href=\"([\\S]+)/\".*alt=\"\\[DIR\\]\">.*([\\d]{2}-[\\w\\d]{2,5}-[\\d]{4}\\s[\\d]{2}:[\\d]{2})
http.parse.file.line=<a[\\s]+href=\"([\\S]+)\".*([\\d]{2}-[\\w\\d]{2,5}-[\\d]{4}\\s[\\d]{2}:[\\d]{2})[\\s]+([\\d\\.]+[MKG]{0,1})
http.group.dir.name=1
http.group.dir.date=2
http.group.file.name=1
http.group.file.date=2
http.group.file.size=3
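# Illustrative directory-listing lines the regexps above would match
# (hypothetical input for testing, e.g. on https://regex101.com/; not
# configuration):
# <a href="db/"><img alt="[DIR]"></a>  04-Jan-2020 12:30  -
# <a href="nr.gz">nr.gz</a>  04-Jan-2020 12:30  1.2G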
#Needed if data sources are contained in an archive
log.files=true
local.files.excluded=\\.panfs.*
#~40 min
ftp.timeout=2000000
ftp.automatic.reconnect=5
ftp.active.mode=false
# Bank default access
visibility.default=public
# use hard links instead of copy
# use_hardlinks=0
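########################
# Logging configuration (Python logging.config.fileConfig format)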
[loggers]
keys = root, biomaj
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = DEBUG
handlers = console
[logger_biomaj]
level = DEBUG
handlers = console
qualname = biomaj
propagate=0
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = generic
[formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s