// Mastodon streaming API server: clustered Node.js process fanning out
// Redis pub/sub messages to HTTP (EventSource) and WebSocket clients.
const os = require ( 'os' ) ;
const throng = require ( 'throng' ) ;
const dotenv = require ( 'dotenv' ) ;
const express = require ( 'express' ) ;
const http = require ( 'http' ) ;
const redis = require ( 'redis' ) ;
const pg = require ( 'pg' ) ;
const log = require ( 'npmlog' ) ;
const url = require ( 'url' ) ;
const WebSocket = require ( 'uws' ) ;
const uuid = require ( 'uuid' ) ;
2017-05-20 16:31:47 +01:00
const env = process . env . NODE _ENV || 'development' ;
2017-02-02 15:11:36 +00:00
dotenv . config ( {
2017-05-20 16:31:47 +01:00
path : env === 'production' ? '.env.production' : '.env' ,
} ) ;
2017-02-02 00:31:09 +00:00
2017-05-28 15:25:26 +01:00
log . level = process . env . LOG _LEVEL || 'verbose' ;
2017-05-03 22:18:13 +01:00
const dbUrlToConfig = ( dbUrl ) => {
if ( ! dbUrl ) {
2017-05-20 16:31:47 +01:00
return { } ;
2017-05-03 22:18:13 +01:00
}
2017-05-20 16:31:47 +01:00
const params = url . parse ( dbUrl ) ;
const config = { } ;
2017-05-04 14:53:44 +01:00
if ( params . auth ) {
2017-05-20 16:31:47 +01:00
[ config . user , config . password ] = params . auth . split ( ':' ) ;
2017-05-04 14:53:44 +01:00
}
if ( params . hostname ) {
2017-05-20 16:31:47 +01:00
config . host = params . hostname ;
2017-05-04 14:53:44 +01:00
}
if ( params . port ) {
2017-05-20 16:31:47 +01:00
config . port = params . port ;
2017-05-03 22:18:13 +01:00
}
2017-05-04 14:53:44 +01:00
if ( params . pathname ) {
2017-05-20 16:31:47 +01:00
config . database = params . pathname . split ( '/' ) [ 1 ] ;
2017-05-04 14:53:44 +01:00
}
2017-05-20 16:31:47 +01:00
const ssl = params . query && params . query . ssl ;
2017-05-20 20:06:09 +01:00
2017-05-04 14:53:44 +01:00
if ( ssl ) {
2017-05-20 16:31:47 +01:00
config . ssl = ssl === 'true' || ssl === '1' ;
2017-05-04 14:53:44 +01:00
}
2017-05-20 16:31:47 +01:00
return config ;
} ;
2017-05-03 22:18:13 +01:00
2017-05-20 20:06:09 +01:00
const redisUrlToClient = ( defaultConfig , redisUrl ) => {
const config = defaultConfig ;
if ( ! redisUrl ) {
return redis . createClient ( config ) ;
}
if ( redisUrl . startsWith ( 'unix://' ) ) {
return redis . createClient ( redisUrl . slice ( 7 ) , config ) ;
}
return redis . createClient ( Object . assign ( config , {
url : redisUrl ,
} ) ) ;
} ;
2017-05-28 15:25:26 +01:00
const numWorkers = + process . env . STREAMING _CLUSTER _NUM || ( env === 'development' ? 1 : Math . max ( os . cpus ( ) . length - 1 , 1 ) ) ;
2017-05-03 22:18:13 +01:00
2017-05-28 15:25:26 +01:00
const startMaster = ( ) => {
log . info ( ` Starting streaming API server master with ${ numWorkers } workers ` ) ;
} ;
2017-05-03 22:18:13 +01:00
2017-05-28 15:25:26 +01:00
const startWorker = ( workerId ) => {
log . info ( ` Starting worker ${ workerId } ` ) ;
2017-04-17 03:32:30 +01:00
const pgConfigs = {
development : {
2017-06-25 17:13:31 +01:00
user : process . env . DB _USER || pg . defaults . user ,
password : process . env . DB _PASS || pg . defaults . password ,
2017-10-17 10:45:37 +01:00
database : process . env . DB _NAME || 'mastodon_development' ,
2017-06-25 17:13:31 +01:00
host : process . env . DB _HOST || pg . defaults . host ,
port : process . env . DB _PORT || pg . defaults . port ,
2017-05-20 16:31:47 +01:00
max : 10 ,
2017-04-17 03:32:30 +01:00
} ,
production : {
user : process . env . DB _USER || 'mastodon' ,
password : process . env . DB _PASS || '' ,
database : process . env . DB _NAME || 'mastodon_production' ,
host : process . env . DB _HOST || 'localhost' ,
port : process . env . DB _PORT || 5432 ,
2017-05-20 16:31:47 +01:00
max : 10 ,
} ,
} ;
2017-02-02 00:31:09 +00:00
2017-05-20 16:31:47 +01:00
const app = express ( ) ;
2017-12-12 14:13:24 +00:00
app . set ( 'trusted proxy' , process . env . TRUSTED _PROXY _IP || 'loopback,uniquelocal' ) ;
2017-05-20 16:31:47 +01:00
const pgPool = new pg . Pool ( Object . assign ( pgConfigs [ env ] , dbUrlToConfig ( process . env . DATABASE _URL ) ) ) ;
const server = http . createServer ( app ) ;
const redisNamespace = process . env . REDIS _NAMESPACE || null ;
2017-02-07 13:37:12 +00:00
2017-05-07 18:42:32 +01:00
const redisParams = {
2017-04-17 03:32:30 +01:00
host : process . env . REDIS _HOST || '127.0.0.1' ,
port : process . env . REDIS _PORT || 6379 ,
2017-05-17 14:36:34 +01:00
db : process . env . REDIS _DB || 0 ,
2017-05-03 22:18:13 +01:00
password : process . env . REDIS _PASSWORD ,
2017-05-20 16:31:47 +01:00
} ;
2017-05-07 18:42:32 +01:00
if ( redisNamespace ) {
2017-05-20 16:31:47 +01:00
redisParams . namespace = redisNamespace ;
2017-05-07 18:42:32 +01:00
}
2017-05-20 20:06:09 +01:00
2017-05-20 16:31:47 +01:00
const redisPrefix = redisNamespace ? ` ${ redisNamespace } : ` : '' ;
2017-05-07 18:42:32 +01:00
2017-06-03 19:50:53 +01:00
const redisSubscribeClient = redisUrlToClient ( redisParams , process . env . REDIS _URL ) ;
2017-05-20 20:06:09 +01:00
const redisClient = redisUrlToClient ( redisParams , process . env . REDIS _URL ) ;
2017-02-07 13:37:12 +00:00
2017-05-20 16:31:47 +01:00
const subs = { } ;
2017-02-07 13:37:12 +00:00
2017-06-20 19:41:41 +01:00
redisSubscribeClient . on ( 'message' , ( channel , message ) => {
2017-05-20 16:31:47 +01:00
const callbacks = subs [ channel ] ;
2017-02-07 13:37:12 +00:00
2017-05-20 16:31:47 +01:00
log . silly ( ` New message on channel ${ channel } ` ) ;
2017-02-07 13:37:12 +00:00
2017-04-17 03:32:30 +01:00
if ( ! callbacks ) {
2017-05-20 16:31:47 +01:00
return ;
2017-04-17 03:32:30 +01:00
}
2017-05-28 15:25:26 +01:00
2017-05-20 16:31:47 +01:00
callbacks . forEach ( callback => callback ( message ) ) ;
} ) ;
2017-02-07 13:37:12 +00:00
2017-06-03 19:50:53 +01:00
const subscriptionHeartbeat = ( channel ) => {
const interval = 6 * 60 ;
const tellSubscribed = ( ) => {
redisClient . set ( ` ${ redisPrefix } subscribed: ${ channel } ` , '1' , 'EX' , interval * 3 ) ;
} ;
tellSubscribed ( ) ;
const heartbeat = setInterval ( tellSubscribed , interval * 1000 ) ;
return ( ) => {
clearInterval ( heartbeat ) ;
} ;
} ;
2017-02-07 13:37:12 +00:00
2017-04-17 03:32:30 +01:00
const subscribe = ( channel , callback ) => {
2017-05-20 16:31:47 +01:00
log . silly ( ` Adding listener for ${ channel } ` ) ;
subs [ channel ] = subs [ channel ] || [ ] ;
2017-06-20 19:41:41 +01:00
if ( subs [ channel ] . length === 0 ) {
log . verbose ( ` Subscribe ${ channel } ` ) ;
redisSubscribeClient . subscribe ( channel ) ;
}
2017-05-20 16:31:47 +01:00
subs [ channel ] . push ( callback ) ;
} ;
2017-02-03 17:27:42 +00:00
2017-04-17 03:32:30 +01:00
const unsubscribe = ( channel , callback ) => {
2017-05-20 16:31:47 +01:00
log . silly ( ` Removing listener for ${ channel } ` ) ;
subs [ channel ] = subs [ channel ] . filter ( item => item !== callback ) ;
2017-06-20 19:41:41 +01:00
if ( subs [ channel ] . length === 0 ) {
log . verbose ( ` Unsubscribe ${ channel } ` ) ;
redisSubscribeClient . unsubscribe ( channel ) ;
}
2017-05-20 16:31:47 +01:00
} ;
2017-02-03 17:27:42 +00:00
2017-04-17 03:32:30 +01:00
const allowCrossDomain = ( req , res , next ) => {
2017-05-20 16:31:47 +01:00
res . header ( 'Access-Control-Allow-Origin' , '*' ) ;
res . header ( 'Access-Control-Allow-Headers' , 'Authorization, Accept, Cache-Control' ) ;
res . header ( 'Access-Control-Allow-Methods' , 'GET, OPTIONS' ) ;
2017-02-05 22:37:25 +00:00
2017-05-20 16:31:47 +01:00
next ( ) ;
} ;
2017-02-05 22:37:25 +00:00
2017-04-17 03:32:30 +01:00
const setRequestId = ( req , res , next ) => {
2017-05-20 16:31:47 +01:00
req . requestId = uuid . v4 ( ) ;
res . header ( 'X-Request-Id' , req . requestId ) ;
2017-02-02 00:31:09 +00:00
2017-05-20 16:31:47 +01:00
next ( ) ;
} ;
2017-02-02 00:31:09 +00:00
2017-12-12 14:13:24 +00:00
const setRemoteAddress = ( req , res , next ) => {
req . remoteAddress = req . connection . remoteAddress ;
next ( ) ;
} ;
2017-04-17 03:32:30 +01:00
const accountFromToken = ( token , req , next ) => {
pgPool . connect ( ( err , client , done ) => {
2017-02-02 00:31:09 +00:00
if ( err ) {
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-02-02 00:31:09 +00:00
}
2017-05-27 22:27:54 +01:00
client . query ( 'SELECT oauth_access_tokens.resource_owner_id, users.account_id, users.filtered_languages FROM oauth_access_tokens INNER JOIN users ON oauth_access_tokens.resource_owner_id = users.id WHERE oauth_access_tokens.token = $1 AND oauth_access_tokens.revoked_at IS NULL LIMIT 1' , [ token ] , ( err , result ) => {
2017-05-20 16:31:47 +01:00
done ( ) ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
if ( err ) {
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
if ( result . rows . length === 0 ) {
2017-05-20 16:31:47 +01:00
err = new Error ( 'Invalid access token' ) ;
err . statusCode = 401 ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
req . accountId = result . rows [ 0 ] . account _id ;
2017-05-26 23:53:48 +01:00
req . filteredLanguages = result . rows [ 0 ] . filtered _languages ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
next ( ) ;
} ) ;
} ) ;
} ;
2017-02-03 23:34:31 +00:00
2017-12-12 14:13:24 +00:00
const accountFromRequest = ( req , next , required = true ) => {
2017-05-29 17:20:53 +01:00
const authorization = req . headers . authorization ;
const location = url . parse ( req . url , true ) ;
const accessToken = location . query . access _token ;
2017-02-03 23:34:31 +00:00
2017-05-21 20:13:11 +01:00
if ( ! authorization && ! accessToken ) {
2017-12-12 14:13:24 +00:00
if ( required ) {
const err = new Error ( 'Missing access token' ) ;
err . statusCode = 401 ;
2017-02-02 00:31:09 +00:00
2017-12-12 14:13:24 +00:00
next ( err ) ;
return ;
} else {
next ( ) ;
return ;
}
2017-04-17 03:32:30 +01:00
}
2017-02-02 16:10:59 +00:00
2017-05-21 20:13:11 +01:00
const token = authorization ? authorization . replace ( /^Bearer / , '' ) : accessToken ;
2017-02-02 12:56:14 +00:00
2017-05-20 16:31:47 +01:00
accountFromToken ( token , req , next ) ;
} ;
2017-02-05 02:19:04 +00:00
2017-12-12 14:13:24 +00:00
const PUBLIC _STREAMS = [
'public' ,
'public:local' ,
'hashtag' ,
'hashtag:local' ,
] ;
2017-05-29 17:20:53 +01:00
const wsVerifyClient = ( info , cb ) => {
2017-12-12 14:13:24 +00:00
const location = url . parse ( info . req . url , true ) ;
const authRequired = ! PUBLIC _STREAMS . some ( stream => stream === location . query . stream ) ;
2017-05-29 17:20:53 +01:00
accountFromRequest ( info . req , err => {
if ( ! err ) {
cb ( true , undefined , undefined ) ;
} else {
log . error ( info . req . requestId , err . toString ( ) ) ;
cb ( false , 401 , 'Unauthorized' ) ;
}
2017-12-12 14:13:24 +00:00
} , authRequired ) ;
2017-05-29 17:20:53 +01:00
} ;
2017-12-12 14:13:24 +00:00
const PUBLIC _ENDPOINTS = [
'/api/v1/streaming/public' ,
'/api/v1/streaming/public/local' ,
'/api/v1/streaming/hashtag' ,
'/api/v1/streaming/hashtag/local' ,
] ;
2017-05-29 17:20:53 +01:00
const authenticationMiddleware = ( req , res , next ) => {
if ( req . method === 'OPTIONS' ) {
next ( ) ;
return ;
}
2017-12-12 14:13:24 +00:00
const authRequired = ! PUBLIC _ENDPOINTS . some ( endpoint => endpoint === req . path ) ;
accountFromRequest ( req , next , authRequired ) ;
2017-05-29 17:20:53 +01:00
} ;
2017-06-26 00:46:15 +01:00
const errorMiddleware = ( err , req , res , { } ) => {
2017-05-28 15:25:26 +01:00
log . error ( req . requestId , err . toString ( ) ) ;
2017-05-20 16:31:47 +01:00
res . writeHead ( err . statusCode || 500 , { 'Content-Type' : 'application/json' } ) ;
2017-05-28 15:25:26 +01:00
res . end ( JSON . stringify ( { error : err . statusCode ? err . toString ( ) : 'An unexpected error occurred' } ) ) ;
2017-05-20 16:31:47 +01:00
} ;
2017-02-05 02:19:04 +00:00
2017-04-17 03:32:30 +01:00
const placeholders = ( arr , shift = 0 ) => arr . map ( ( _ , i ) => ` $ ${ i + 1 + shift } ` ) . join ( ', ' ) ;
2017-02-02 00:31:09 +00:00
2017-11-17 23:16:48 +00:00
const authorizeListAccess = ( id , req , next ) => {
pgPool . connect ( ( err , client , done ) => {
if ( err ) {
next ( false ) ;
return ;
}
client . query ( 'SELECT id, account_id FROM lists WHERE id = $1 LIMIT 1' , [ id ] , ( err , result ) => {
done ( ) ;
if ( err || result . rows . length === 0 || result . rows [ 0 ] . account _id !== req . accountId ) {
next ( false ) ;
return ;
}
next ( true ) ;
} ) ;
} ) ;
} ;
2017-06-03 19:50:53 +01:00
const streamFrom = ( id , req , output , attachCloseHandler , needsFiltering = false , notificationOnly = false ) => {
2017-12-12 14:13:24 +00:00
const accountId = req . accountId || req . remoteAddress ;
2017-06-03 19:50:53 +01:00
const streamType = notificationOnly ? ' (notification)' : '' ;
2017-12-12 14:13:24 +00:00
log . verbose ( req . requestId , ` Starting stream from ${ id } for ${ accountId } ${ streamType } ` ) ;
2017-04-17 03:32:30 +01:00
const listener = message => {
2017-05-20 16:31:47 +01:00
const { event , payload , queued _at } = JSON . parse ( message ) ;
2017-02-02 12:56:14 +00:00
2017-04-17 03:32:30 +01:00
const transmit = ( ) => {
2017-07-07 15:56:52 +01:00
const now = new Date ( ) . getTime ( ) ;
const delta = now - queued _at ;
2017-09-24 14:31:03 +01:00
const encodedPayload = typeof payload === 'object' ? JSON . stringify ( payload ) : payload ;
2017-02-02 12:56:14 +00:00
2017-12-12 14:13:24 +00:00
log . silly ( req . requestId , ` Transmitting for ${ accountId } : ${ event } ${ encodedPayload } Delay: ${ delta } ms ` ) ;
2017-07-07 15:56:52 +01:00
output ( event , encodedPayload ) ;
2017-05-20 16:31:47 +01:00
} ;
2017-02-02 12:56:14 +00:00
2017-06-03 19:50:53 +01:00
if ( notificationOnly && event !== 'notification' ) {
return ;
}
2017-04-17 03:32:30 +01:00
// Only messages that may require filtering are statuses, since notifications
// are already personalized and deletes do not matter
if ( needsFiltering && event === 'update' ) {
pgPool . connect ( ( err , client , done ) => {
2017-02-02 12:56:14 +00:00
if ( err ) {
2017-05-20 16:31:47 +01:00
log . error ( err ) ;
return ;
2017-02-02 12:56:14 +00:00
}
2017-07-07 15:56:52 +01:00
const unpackedPayload = payload ;
2017-05-29 17:01:08 +01:00
const targetAccountIds = [ unpackedPayload . account . id ] . concat ( unpackedPayload . mentions . map ( item => item . id ) ) ;
2017-05-20 16:31:47 +01:00
const accountDomain = unpackedPayload . account . acct . split ( '@' ) [ 1 ] ;
2017-04-17 03:32:30 +01:00
2017-06-09 18:46:33 +01:00
if ( Array . isArray ( req . filteredLanguages ) && req . filteredLanguages . indexOf ( unpackedPayload . language ) !== - 1 ) {
2017-05-26 23:53:48 +01:00
log . silly ( req . requestId , ` Message ${ unpackedPayload . id } filtered by language ( ${ unpackedPayload . language } ) ` ) ;
2017-05-28 15:25:26 +01:00
done ( ) ;
2017-05-26 23:53:48 +01:00
return ;
}
2017-12-13 12:42:16 +00:00
if ( req . accountId ) {
2017-12-12 14:13:24 +00:00
const queries = [
client . query ( ` SELECT 1 FROM blocks WHERE (account_id = $ 1 AND target_account_id IN ( ${ placeholders ( targetAccountIds , 2 ) } )) OR (account_id = $ 2 AND target_account_id = $ 1) UNION SELECT 1 FROM mutes WHERE account_id = $ 1 AND target_account_id IN ( ${ placeholders ( targetAccountIds , 2 ) } ) ` , [ req . accountId , unpackedPayload . account . id ] . concat ( targetAccountIds ) ) ,
] ;
2017-04-17 03:32:30 +01:00
2017-12-12 14:13:24 +00:00
if ( accountDomain ) {
queries . push ( client . query ( 'SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2' , [ req . accountId , accountDomain ] ) ) ;
}
2017-02-02 12:56:14 +00:00
2017-12-12 14:13:24 +00:00
Promise . all ( queries ) . then ( values => {
done ( ) ;
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
2017-12-12 14:13:24 +00:00
if ( values [ 0 ] . rows . length > 0 || ( values . length > 1 && values [ 1 ] . rows . length > 0 ) ) {
return ;
}
2017-04-17 03:32:30 +01:00
2017-12-12 14:13:24 +00:00
transmit ( ) ;
} ) . catch ( err => {
done ( ) ;
log . error ( err ) ;
} ) ;
} else {
2017-05-28 15:25:26 +01:00
done ( ) ;
2017-05-20 16:31:47 +01:00
transmit ( ) ;
2017-12-12 14:13:24 +00:00
}
2017-05-20 16:31:47 +01:00
} ) ;
2017-04-17 03:32:30 +01:00
} else {
2017-05-20 16:31:47 +01:00
transmit ( ) ;
2017-04-17 03:32:30 +01:00
}
2017-05-20 16:31:47 +01:00
} ;
2017-04-17 03:32:30 +01:00
2017-05-20 16:31:47 +01:00
subscribe ( ` ${ redisPrefix } ${ id } ` , listener ) ;
attachCloseHandler ( ` ${ redisPrefix } ${ id } ` , listener ) ;
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream output to HTTP
const streamToHttp = ( req , res ) => {
2017-12-12 14:13:24 +00:00
const accountId = req . accountId || req . remoteAddress ;
2017-05-20 16:31:47 +01:00
res . setHeader ( 'Content-Type' , 'text/event-stream' ) ;
res . setHeader ( 'Transfer-Encoding' , 'chunked' ) ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
const heartbeat = setInterval ( ( ) => res . write ( ':thump\n' ) , 15000 ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
req . on ( 'close' , ( ) => {
2017-12-12 14:13:24 +00:00
log . verbose ( req . requestId , ` Ending stream for ${ accountId } ` ) ;
2017-05-20 16:31:47 +01:00
clearInterval ( heartbeat ) ;
} ) ;
2017-02-02 14:20:31 +00:00
2017-04-17 03:32:30 +01:00
return ( event , payload ) => {
2017-05-20 16:31:47 +01:00
res . write ( ` event: ${ event } \n ` ) ;
res . write ( ` data: ${ payload } \n \n ` ) ;
} ;
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream end for HTTP
2017-06-03 19:50:53 +01:00
const streamHttpEnd = ( req , closeHandler = false ) => ( id , listener ) => {
2017-04-17 03:32:30 +01:00
req . on ( 'close' , ( ) => {
2017-05-20 16:31:47 +01:00
unsubscribe ( id , listener ) ;
2017-06-03 19:50:53 +01:00
if ( closeHandler ) {
closeHandler ( ) ;
}
2017-05-20 16:31:47 +01:00
} ) ;
} ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream output to WebSockets
2017-05-28 15:25:26 +01:00
const streamToWs = ( req , ws ) => ( event , payload ) => {
if ( ws . readyState !== ws . OPEN ) {
log . error ( req . requestId , 'Tried writing to closed socket' ) ;
return ;
}
2017-02-03 23:34:31 +00:00
2017-05-28 15:25:26 +01:00
ws . send ( JSON . stringify ( { event , payload } ) ) ;
2017-05-20 16:31:47 +01:00
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream end for WebSockets
2017-06-03 19:50:53 +01:00
const streamWsEnd = ( req , ws , closeHandler = false ) => ( id , listener ) => {
2017-12-12 14:13:24 +00:00
const accountId = req . accountId || req . remoteAddress ;
2017-04-17 03:32:30 +01:00
ws . on ( 'close' , ( ) => {
2017-12-12 14:13:24 +00:00
log . verbose ( req . requestId , ` Ending stream for ${ accountId } ` ) ;
2017-05-20 16:31:47 +01:00
unsubscribe ( id , listener ) ;
2017-06-03 19:50:53 +01:00
if ( closeHandler ) {
closeHandler ( ) ;
}
2017-05-20 16:31:47 +01:00
} ) ;
2017-04-02 20:27:14 +01:00
2017-06-23 15:05:04 +01:00
ws . on ( 'error' , ( ) => {
2017-12-12 14:13:24 +00:00
log . verbose ( req . requestId , ` Ending stream for ${ accountId } ` ) ;
2017-05-20 16:31:47 +01:00
unsubscribe ( id , listener ) ;
2017-06-03 19:50:53 +01:00
if ( closeHandler ) {
closeHandler ( ) ;
}
2017-05-20 16:31:47 +01:00
} ) ;
} ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
app . use ( setRequestId ) ;
2017-12-12 14:13:24 +00:00
app . use ( setRemoteAddress ) ;
2017-05-20 16:31:47 +01:00
app . use ( allowCrossDomain ) ;
app . use ( authenticationMiddleware ) ;
app . use ( errorMiddleware ) ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/user' , ( req , res ) => {
2017-06-03 19:50:53 +01:00
const channel = ` timeline: ${ req . accountId } ` ;
streamFrom ( channel , req , streamToHttp ( req , res ) , streamHttpEnd ( req , subscriptionHeartbeat ( channel ) ) ) ;
} ) ;
app . get ( '/api/v1/streaming/user/notification' , ( req , res ) => {
streamFrom ( ` timeline: ${ req . accountId } ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , false , true ) ;
2017-05-20 16:31:47 +01:00
} ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/public' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( 'timeline:public' , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/public/local' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( 'timeline:public:local' , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-02-06 22:46:14 +00:00
2017-10-16 05:02:39 +01:00
app . get ( '/api/v1/streaming/direct' , ( req , res ) => {
streamFrom ( ` timeline:direct: ${ req . accountId } ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/hashtag' , ( req , res ) => {
2017-09-04 11:52:06 +01:00
streamFrom ( ` timeline:hashtag: ${ req . query . tag . toLowerCase ( ) } ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
2017-05-20 16:31:47 +01:00
} ) ;
2017-02-02 12:56:14 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/hashtag/local' , ( req , res ) => {
2017-09-04 11:52:06 +01:00
streamFrom ( ` timeline:hashtag: ${ req . query . tag . toLowerCase ( ) } :local ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
2017-05-20 16:31:47 +01:00
} ) ;
2017-02-06 22:46:14 +00:00
2017-11-17 23:16:48 +00:00
app . get ( '/api/v1/streaming/list' , ( req , res ) => {
const listId = req . query . list ;
authorizeListAccess ( listId , req , authorized => {
if ( ! authorized ) {
res . writeHead ( 404 , { 'Content-Type' : 'application/json' } ) ;
res . end ( JSON . stringify ( { error : 'Not found' } ) ) ;
return ;
}
const channel = ` timeline:list: ${ listId } ` ;
streamFrom ( channel , req , streamToHttp ( req , res ) , streamHttpEnd ( req , subscriptionHeartbeat ( channel ) ) ) ;
} ) ;
} ) ;
const wss = new WebSocket . Server ( { server , verifyClient : wsVerifyClient } ) ;
2017-05-29 17:20:53 +01:00
2017-04-17 03:32:30 +01:00
wss . on ( 'connection' , ws => {
2017-05-29 17:20:53 +01:00
const req = ws . upgradeReq ;
const location = url . parse ( req . url , true ) ;
req . requestId = uuid . v4 ( ) ;
2017-12-12 14:13:24 +00:00
req . remoteAddress = ws . _socket . remoteAddress ;
2017-02-02 00:31:09 +00:00
2017-05-28 15:25:26 +01:00
ws . isAlive = true ;
ws . on ( 'pong' , ( ) => {
ws . isAlive = true ;
} ) ;
2017-05-29 17:20:53 +01:00
switch ( location . query . stream ) {
case 'user' :
2017-06-03 19:50:53 +01:00
const channel = ` timeline: ${ req . accountId } ` ;
streamFrom ( channel , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws , subscriptionHeartbeat ( channel ) ) ) ;
break ;
case 'user:notification' :
streamFrom ( ` timeline: ${ req . accountId } ` , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws ) , false , true ) ;
2017-05-29 17:20:53 +01:00
break ;
case 'public' :
streamFrom ( 'timeline:public' , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws ) , true ) ;
break ;
case 'public:local' :
streamFrom ( 'timeline:public:local' , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws ) , true ) ;
break ;
2017-10-16 05:02:39 +01:00
case 'direct' :
streamFrom ( ` timeline:direct: ${ req . accountId } ` , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws ) , true ) ;
break ;
2017-05-29 17:20:53 +01:00
case 'hashtag' :
2017-09-04 11:52:06 +01:00
streamFrom ( ` timeline:hashtag: ${ location . query . tag . toLowerCase ( ) } ` , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws ) , true ) ;
2017-05-29 17:20:53 +01:00
break ;
case 'hashtag:local' :
2017-09-04 11:52:06 +01:00
streamFrom ( ` timeline:hashtag: ${ location . query . tag . toLowerCase ( ) } :local ` , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws ) , true ) ;
2017-05-29 17:20:53 +01:00
break ;
2017-11-17 23:16:48 +00:00
case 'list' :
const listId = location . query . list ;
authorizeListAccess ( listId , req , authorized => {
if ( ! authorized ) {
ws . close ( ) ;
return ;
}
const channel = ` timeline:list: ${ listId } ` ;
streamFrom ( channel , req , streamToWs ( req , ws ) , streamWsEnd ( req , ws , subscriptionHeartbeat ( channel ) ) ) ;
} ) ;
break ;
2017-05-29 17:20:53 +01:00
default :
ws . close ( ) ;
}
2017-05-20 16:31:47 +01:00
} ) ;
2017-02-03 23:34:31 +00:00
2017-06-23 15:05:04 +01:00
setInterval ( ( ) => {
2017-05-28 15:25:26 +01:00
wss . clients . forEach ( ws => {
if ( ws . isAlive === false ) {
ws . terminate ( ) ;
return ;
}
ws . isAlive = false ;
ws . ping ( '' , false , true ) ;
} ) ;
} , 30000 ) ;
2017-11-18 03:44:19 +00:00
server . listen ( process . env . PORT || 4000 , process . env . BIND || '0.0.0.0' , ( ) => {
2017-05-28 15:25:26 +01:00
log . info ( ` Worker ${ workerId } now listening on ${ server . address ( ) . address } : ${ server . address ( ) . port } ` ) ;
2017-05-20 16:31:47 +01:00
} ) ;
2017-04-21 18:24:31 +01:00
2017-05-28 15:25:26 +01:00
const onExit = ( ) => {
log . info ( ` Worker ${ workerId } exiting, bye bye ` ) ;
2017-05-20 16:31:47 +01:00
server . close ( ) ;
2017-07-07 19:01:00 +01:00
process . exit ( 0 ) ;
2017-05-28 15:25:26 +01:00
} ;
const onError = ( err ) => {
log . error ( err ) ;
2017-12-12 19:19:33 +00:00
server . close ( ) ;
process . exit ( 0 ) ;
2017-05-28 15:25:26 +01:00
} ;
process . on ( 'SIGINT' , onExit ) ;
process . on ( 'SIGTERM' , onExit ) ;
process . on ( 'exit' , onExit ) ;
2017-12-12 19:19:33 +00:00
process . on ( 'uncaughtException' , onError ) ;
2017-05-28 15:25:26 +01:00
} ;
throng ( {
workers : numWorkers ,
lifetime : Infinity ,
start : startWorker ,
master : startMaster ,
} ) ;