---
# Nanobox deployment configuration.
# Defines the build (run.config), deploy hooks (deploy.config), the web/worker
# components, and the data components (each with a nightly cron backup that
# ships archives to a warehouse service and prunes old ones, keeping the
# newest ${BACKUP_COUNT:-1}).

run.config:
  engine: ruby
  engine.config:
    runtime: ruby-2.5

  extra_packages:
    # basic servers:
    - nginx
    - nodejs

    # for images:
    - ImageMagick
    - jemalloc

    # for videos:
    - ffmpeg3

    # to prep the .env file:
    - gettext-tools

    # for node-gyp, used in the asset compilation process:
    - python-2

    # i18n:
    - libidn

  cache_dirs:
    - node_modules

  extra_path_dirs:
    - node_modules/.bin

  build_triggers:
    - .ruby-version
    - Gemfile
    - Gemfile.lock
    - package.json
    - yarn.lock

  extra_steps:
    - cp .env.nanobox .env
    - yarn

  fs_watch: true

deploy.config:
  extra_steps:
    - NODE_ENV=production bundle exec rake assets:precompile

  transform:
    - "envsubst < /app/.env.nanobox > /app/.env.production"
    # Render the nginx configs, sourcing LOCAL_DOMAIN from the generated
    # env file if it isn't already set in the environment:
    - |-
      if [ -z "$LOCAL_DOMAIN" ]
      then
        . /app/.env.production
        export LOCAL_DOMAIN
      fi
      erb /app/nanobox/nginx-web.conf.erb > /app/nanobox/nginx-web.conf
      erb /app/nanobox/nginx-stream.conf.erb > /app/nanobox/nginx-stream.conf
    - touch /app/log/production.log

  before_live:
    web.web:
      - bin/tootctl cache clear
      - bundle exec rake db:migrate:setup

  after_live:
    worker.sidekiq:
      # Rebuild the search index unless Elasticsearch is explicitly disabled:
      - |-
        if [[ "${ES_ENABLED}" != "false" ]]
        then
          bin/tootctl search deploy
        fi

web.web:
  start:
    nginx: nginx -c /app/nanobox/nginx-web.conf
    rails: bundle exec puma -C /app/config/puma.rb

  routes:
    - '/'

  writable_dirs:
    - tmp

  log_watch:
    rails: 'log/production.log'

  network_dirs:
    data.storage:
      - public/system

web.stream:
  start:
    nginx: nginx -c /app/nanobox/nginx-stream.conf
    node: yarn run start

  routes:
    - '/api/v1/streaming*'
    # Somehow we're getting requests for scheme://domain//api/v1/streaming* - match those, too
    - '//api/v1/streaming*'

  writable_dirs:
    - tmp

worker.sidekiq:
  start:
    default: bundle exec sidekiq -c 5 -q default -L /app/log/sidekiq.log
    mailers: bundle exec sidekiq -c 5 -q mailers -L /app/log/sidekiq.log
    pull: bundle exec sidekiq -c 5 -q pull -L /app/log/sidekiq.log
    push: bundle exec sidekiq -c 5 -q push -L /app/log/sidekiq.log

  writable_dirs:
    - tmp

  log_watch:
    rails: 'log/production.log'
    sidekiq: 'log/sidekiq.log'

  network_dirs:
    data.storage:
      - public/system

data.db:
  image: nanobox/postgresql:9.6

  cron:
    - id: backup
      schedule: '0 3 * * *'
      # Dump, compress, and upload the database; then delete all but the
      # newest ${BACKUP_COUNT:-1} backups for this host from the warehouse.
      command: |
        PGPASSWORD=${DATA_DB_PASS} pg_dump -U ${DATA_DB_USER} -w -Fc -O gonano |
        gzip |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz -X POST -T - >&2
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done

data.elastic:
  image: nanobox/elasticsearch:5

  cron:
    - id: backup
      schedule: '0 3 * * *'
      # Take a filesystem snapshot via the ES API, upload it as a tarball,
      # clean up the snapshot, then prune old backups from the warehouse.
      command: |
        id=$(cat /proc/sys/kernel/random/uuid)
        curl -X PUT -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}" -d "{\"type\": \"fs\",\"settings\": {\"location\": \"/var/tmp/${id}\",\"compress\": true}}"
        curl -X PUT -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}/backup?wait_for_completion=true&pretty"
        tar -cz -C "/var/tmp/${id}" . |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
        curl -X DELETE -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}"
        rm -rf "/var/tmp/${id}"
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done

data.redis:
  image: nanobox/redis:4.0

  cron:
    - id: backup
      schedule: '0 3 * * *'
      # Upload the RDB dump file, then prune old backups from the warehouse.
      command: |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb -X POST -T /data/var/db/redis/dump.rdb >&2
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done

data.storage:
  image: nanobox/unfs:0.9

  cron:
    - id: backup
      schedule: '0 3 * * *'
      # Tar up the NFS data directory, upload it, then prune old backups
      # from the warehouse.
      command: |
        tar cz -C /data/var/db/unfs/ . |
        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
        done