Merge remote-tracking branch 'tootsuite/master' into glitchsoc/master

Jenkins 2018-03-17 17:17:21 +00:00
commit 9341b982b6
8 changed files with 42 additions and 12 deletions

View file

@@ -1,4 +1,4 @@
FROM ruby:2.5.0-alpine3.7
FROM ruby:2.4.3-alpine3.6
LABEL maintainer="https://github.com/tootsuite/mastodon" \
description="Your self-hosted, globally interconnected microblogging community"
@@ -9,6 +9,8 @@ ARG GID=991
ENV RAILS_SERVE_STATIC_FILES=true \
RAILS_ENV=production NODE_ENV=production
ARG YARN_VERSION=1.3.2
ARG YARN_DOWNLOAD_SHA256=6cfe82e530ef0837212f13e45c1565ba53f5199eec2527b85ecbcd88bf26821d
ARG LIBICONV_VERSION=1.15
ARG LIBICONV_DOWNLOAD_SHA256=ccf536620a45458d26ba83887a983b96827001e92a13847b45e4925cc8913178
@@ -30,17 +32,24 @@ RUN apk -U upgrade \
ca-certificates \
ffmpeg \
file \
git \
icu-libs \
imagemagick \
libidn \
libpq \
nodejs \
nodejs-npm \
protobuf \
tini \
tzdata \
yarn \
&& update-ca-certificates \
&& mkdir -p /tmp/src \
&& mkdir -p /tmp/src /opt \
&& wget -O yarn.tar.gz "https://github.com/yarnpkg/yarn/releases/download/v$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& echo "$YARN_DOWNLOAD_SHA256 *yarn.tar.gz" | sha256sum -c - \
&& tar -xzf yarn.tar.gz -C /tmp/src \
&& rm yarn.tar.gz \
&& mv /tmp/src/yarn-v$YARN_VERSION /opt/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& wget -O libiconv.tar.gz "https://ftp.gnu.org/pub/gnu/libiconv/libiconv-$LIBICONV_VERSION.tar.gz" \
&& echo "$LIBICONV_DOWNLOAD_SHA256 *libiconv.tar.gz" | sha256sum -c - \
&& tar -xzf libiconv.tar.gz -C /tmp/src \
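
The hunk above pins yarn and libiconv to fixed versions and SHA-256 digests, and the RUN step checks each downloaded tarball with sha256sum -c before unpacking it. As a rough illustration of that verify-before-unpack idea, here is a Ruby sketch; the URL and digest are placeholders, not the real release values.

# Illustrative sketch only: placeholder URL and digest, not the real values.
require 'digest'
require 'open-uri'

PINNED_SHA256 = '0000000000000000000000000000000000000000000000000000000000000000'

archive = URI.open('https://example.com/yarn-v1.3.2.tar.gz', 'rb', &:read)

if Digest::SHA256.hexdigest(archive) == PINNED_SHA256
  File.binwrite('/tmp/src/yarn.tar.gz', archive)  # only now is it safe to extract
else
  abort 'Checksum mismatch: refusing to unpack the download'
end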

View file

@@ -1,10 +1,30 @@
import './web_push_notifications';
function fetchRoot() {
  return fetch('/', { credentials: 'include' });
}
// Cause a new version of a registered Service Worker to replace an existing one
// that is already installed, and replace the currently active worker on open pages.
self.addEventListener('install', function(event) {
  event.waitUntil(self.skipWaiting());
  const promises = Promise.all([caches.open('mastodon-web'), fetchRoot()]);
  const asyncAdd = promises.then(([cache, root]) => cache.put('/', root));
  event.waitUntil(asyncAdd);
});
self.addEventListener('activate', function(event) {
  event.waitUntil(self.clients.claim());
});
self.addEventListener('fetch', function(event) {
  const url = new URL(event.request.url);
  if (url.pathname.startsWith('/web/')) {
    event.respondWith(fetchRoot().then(response => {
      if (response.ok) {
        return response;
      }
      throw null;
    }).catch(() => caches.match('/')));
  }
});

View file

@@ -440,6 +440,7 @@
text-align: center;
padding: 60px 0;
padding-top: 55px;
margin: 0 auto;
cursor: default;
}

View file

@@ -38,7 +38,7 @@ module Remotable
send("#{attachment_name}_file_name=", basename + extname)
self[attribute_name] = url if has_attribute?(attribute_name)
rescue HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError, Paperclip::Errors::NotIdentifiedByImageMagickError, Addressable::URI::InvalidURIError => e
rescue HTTP::TimeoutError, HTTP::ConnectionError, OpenSSL::SSL::SSLError, Paperclip::Errors::NotIdentifiedByImageMagickError, Addressable::URI::InvalidURIError, Mastodon::HostValidationError => e
Rails.logger.debug "Error fetching remote #{attachment_name}: #{e}"
nil
end
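
The rescue list above now also covers Mastodon::HostValidationError, so an attachment URL that fails host validation is logged and skipped (nil) instead of raising out of the job. Below is a minimal, self-contained sketch of that rescue-and-log pattern; the error classes and fetch helper are stand-ins, not the real Mastodon code.

# Stand-in classes for illustration; not the actual Mastodon exceptions.
class HostValidationError < StandardError; end
class RemoteTimeoutError  < StandardError; end

def fetch_remote_attachment(url)
  raise HostValidationError, "refusing to fetch #{url}"  # pretend validation failed
rescue RemoteTimeoutError, HostValidationError => e
  # Grouping the expected failure modes in one rescue keeps callers simple:
  # log the problem and return nil rather than aborting the whole operation.
  warn "Error fetching remote attachment: #{e}"
  nil
end

fetch_remote_attachment('https://blocked.example/avatar.png')  # => nil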

View file

@@ -49,7 +49,7 @@ class BackupService < BaseService
end
end
archive_filename = ['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(2)].join('-') + '.tar.gz'
archive_filename = ['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-') + '.tar.gz'
@backup.dump = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
@backup.processed = true
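
The filename change above swaps SecureRandom.hex(2) for SecureRandom.hex(16): hex(n) produces 2*n hexadecimal characters, so the random suffix grows from 4 characters (16 bits) to 32 characters (128 bits) and the archive name is no longer practically guessable. The snippet below simply restates how that filename is assembled.

require 'securerandom'

SecureRandom.hex(2)   # => e.g. "9f3a"            (4 hex chars, 16 bits)
SecureRandom.hex(16)  # => 32 hex chars, 128 bits of entropy

archive_filename = ['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-') + '.tar.gz'
# => e.g. "archive-20180317171721-<32 random hex chars>.tar.gz"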

View file

@@ -195,7 +195,7 @@ data.db:
command: |
PGPASSWORD=${DATA_DB_PASS} pg_dump -U ${DATA_DB_USER} -w -Fc -O gonano |
gzip |
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz -X POST -T - >&2
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
sed 's/,/\n/g' |
grep ${HOSTNAME} |
@@ -215,7 +215,7 @@ data.redis:
- id: backup
schedule: '0 3 * * *'
command: |
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb -X POST -T /data/var/db/redis/dump.rdb >&2
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
sed 's/,/\n/g' |
grep ${HOSTNAME} |
@@ -236,7 +236,7 @@ data.storage:
schedule: '0 3 * * *'
command: |
tar cz -C /data/var/db/unfs/ . |
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
sed 's/,/\n/g' |
grep ${HOSTNAME} |
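
In the backup commands above, curl's --data-binary @- reads the entire dump from stdin into memory before sending it, while -T - streams stdin as it is produced (-X POST keeps the method POST, since -T defaults to PUT, and >&2 routes curl's output to stderr so it stays out of the pipeline). Here is a hedged Ruby sketch of the same buffered-versus-streamed distinction using Net::HTTP; the endpoint and token are placeholders.

# Sketch only: placeholder endpoint and token.
require 'net/http'

uri = URI('https://warehouse.example:7410/blobs/backup-host-20180317.sql.gz')

Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  request = Net::HTTP::Post.new(uri)
  request['X-AUTH-TOKEN'] = 'example-token'

  # Buffered, like --data-binary @-: the whole dump is held in memory first.
  #   request.body = $stdin.read

  # Streamed, like -T -: the body is read from the IO in chunks as it is sent.
  request.body_stream = $stdin
  request['Transfer-Encoding'] = 'chunked'

  http.request(request)
end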

View file

@@ -13,7 +13,7 @@ module Mastodon
end
def patch
1
2
end
def pre
@@ -21,7 +21,7 @@ module Mastodon
end
def flags
''
'rc2'
end
def to_a
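
Reading each pair as removed-then-added, the version module's patch component moves from 1 to 2 and its flags from '' to 'rc2', i.e. a release-candidate bump. The sketch below shows how components of this shape typically compose into a version string; it is a simplified stand-in rather than the exact Mastodon implementation, and the major/minor values are only examples since this hunk does not show them.

# Simplified stand-in; major/minor are example values not shown in the hunk.
module Version
  module_function

  def major; 2 end
  def minor; 3 end
  def patch; 2 end
  def flags; 'rc2' end

  def to_a
    [major, minor, patch].compact
  end

  def to_s
    [to_a.join('.'), flags].join
  end
end

puts Version.to_s  # => "2.3.2rc2"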

View file

@@ -472,7 +472,7 @@ namespace :mastodon do
if user.save
prompt.ok 'User created and confirmation mail sent to the user\'s email address.'
prompt.ok "Here is the random password generated for the user: #{password}"
prompt.ok "Here is the random password generated for the user: #{user.password}"
else
prompt.warn 'User was not created because of the following errors:'
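
The last change interpolates user.password rather than the local password variable, presumably so the message always reports the credential that actually ended up on the saved record, even if it was generated or replaced while the user object was being built. A minimal, self-contained sketch of that difference follows; the User struct here is a stand-in, not the Rails model.

# Stand-in objects for illustration; not the actual Rails model or prompt.
require 'securerandom'

User = Struct.new(:email, :password)

password = SecureRandom.hex(12)            # generated up front
user = User.new('admin@example.com', password)
user.password = SecureRandom.hex(12)       # e.g. regenerated further along

puts "Password actually set on the user: #{user.password}"  # what was saved
puts "Stale local variable:              #{password}"       # may differ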