2017-04-17 03:32:30 +01:00
import os from 'os' ;
import cluster from 'cluster' ;
2017-05-20 16:31:47 +01:00
import dotenv from 'dotenv' ;
import express from 'express' ;
import http from 'http' ;
import redis from 'redis' ;
import pg from 'pg' ;
import log from 'npmlog' ;
import url from 'url' ;
import WebSocket from 'uws' ;
import uuid from 'uuid' ;
const env = process.env.NODE_ENV || 'development';

// Load configuration from the environment-specific dotenv file before
// anything else reads process.env.
const dotenvPath = env === 'production' ? '.env.production' : '.env';

dotenv.config({ path: dotenvPath });
2017-02-02 00:31:09 +00:00
2017-05-03 22:18:13 +01:00
/**
 * Parses a postgres:// connection URL into a node-postgres config object.
 *
 * @param {string} dbUrl - Connection URL (typically DATABASE_URL); may be falsy.
 * @returns {object} Partial pg config ({ user, password, host, port, database,
 *   ssl }), or an empty object when no URL is given so env defaults apply.
 */
const dbUrlToConfig = (dbUrl) => {
  if (!dbUrl) {
    return {};
  }

  // Pass `true` so the query string is parsed into an object. Without it,
  // params.query is a raw string and the `ssl` flag below is never detected.
  const params = url.parse(dbUrl, true);
  const config = {};

  if (params.auth) {
    [config.user, config.password] = params.auth.split(':');
  }

  if (params.hostname) {
    config.host = params.hostname;
  }

  if (params.port) {
    config.port = params.port;
  }

  if (params.pathname) {
    // Strip the leading slash from the path to get the database name.
    config.database = params.pathname.split('/')[1];
  }

  const ssl = params.query && params.query.ssl;

  if (ssl) {
    config.ssl = ssl === 'true' || ssl === '1';
  }

  return config;
};
2017-05-03 22:18:13 +01:00
2017-05-20 20:06:09 +01:00
/**
 * Builds a redis client from an optional REDIS_URL, falling back to the
 * supplied default connection parameters.
 *
 * @param {object} defaultConfig - Base options (host, port, db, password...).
 * @param {string} redisUrl - Optional URL; `unix://` socket paths supported.
 * @returns {object} A connected redis client.
 */
const redisUrlToClient = (defaultConfig, redisUrl) => {
  // Copy so we never mutate the caller's config object (the original
  // aliased defaultConfig and Object.assign'd into it below).
  const config = Object.assign({}, defaultConfig);

  if (!redisUrl) {
    return redis.createClient(config);
  }

  if (redisUrl.startsWith('unix://')) {
    // Strip the scheme and connect over the unix domain socket path.
    return redis.createClient(redisUrl.slice(7), config);
  }

  return redis.createClient(Object.assign(config, {
    url: redisUrl,
  }));
};
2017-05-03 22:18:13 +01:00
if ( cluster . isMaster ) {
// Cluster master
2017-05-20 16:31:47 +01:00
const core = + process . env . STREAMING _CLUSTER _NUM || ( env === 'development' ? 1 : Math . max ( os . cpus ( ) . length - 1 , 1 ) ) ;
2017-05-03 22:18:13 +01:00
2017-04-17 03:32:30 +01:00
const fork = ( ) => {
const worker = cluster . fork ( ) ;
2017-05-03 22:18:13 +01:00
2017-04-17 03:32:30 +01:00
worker . on ( 'exit' , ( code , signal ) => {
log . error ( ` Worker died with exit code ${ code } , signal ${ signal } received. ` ) ;
setTimeout ( ( ) => fork ( ) , 0 ) ;
} ) ;
} ;
2017-05-03 22:18:13 +01:00
2017-04-17 03:32:30 +01:00
for ( let i = 0 ; i < core ; i ++ ) fork ( ) ;
2017-05-20 16:31:47 +01:00
log . info ( ` Starting streaming API server master with ${ core } workers ` ) ;
2017-04-17 03:32:30 +01:00
} else {
2017-05-03 22:18:13 +01:00
// Cluster worker
2017-04-17 03:32:30 +01:00
const pgConfigs = {
development : {
database : 'mastodon_development' ,
host : '/var/run/postgresql' ,
2017-05-20 16:31:47 +01:00
max : 10 ,
2017-04-17 03:32:30 +01:00
} ,
production : {
user : process . env . DB _USER || 'mastodon' ,
password : process . env . DB _PASS || '' ,
database : process . env . DB _NAME || 'mastodon_production' ,
host : process . env . DB _HOST || 'localhost' ,
port : process . env . DB _PORT || 5432 ,
2017-05-20 16:31:47 +01:00
max : 10 ,
} ,
} ;
2017-02-02 00:31:09 +00:00
2017-05-20 16:31:47 +01:00
const app = express ( ) ;
const pgPool = new pg . Pool ( Object . assign ( pgConfigs [ env ] , dbUrlToConfig ( process . env . DATABASE _URL ) ) ) ;
const server = http . createServer ( app ) ;
const wss = new WebSocket . Server ( { server } ) ;
const redisNamespace = process . env . REDIS _NAMESPACE || null ;
2017-02-07 13:37:12 +00:00
2017-05-07 18:42:32 +01:00
const redisParams = {
2017-04-17 03:32:30 +01:00
host : process . env . REDIS _HOST || '127.0.0.1' ,
port : process . env . REDIS _PORT || 6379 ,
2017-05-17 14:36:34 +01:00
db : process . env . REDIS _DB || 0 ,
2017-05-03 22:18:13 +01:00
password : process . env . REDIS _PASSWORD ,
2017-05-20 16:31:47 +01:00
} ;
2017-05-07 18:42:32 +01:00
if ( redisNamespace ) {
2017-05-20 16:31:47 +01:00
redisParams . namespace = redisNamespace ;
2017-05-07 18:42:32 +01:00
}
2017-05-20 20:06:09 +01:00
2017-05-20 16:31:47 +01:00
const redisPrefix = redisNamespace ? ` ${ redisNamespace } : ` : '' ;
2017-05-07 18:42:32 +01:00
2017-05-20 20:06:09 +01:00
const redisClient = redisUrlToClient ( redisParams , process . env . REDIS _URL ) ;
2017-02-07 13:37:12 +00:00
2017-05-20 16:31:47 +01:00
const subs = { } ;
2017-02-07 13:37:12 +00:00
2017-04-17 03:32:30 +01:00
redisClient . on ( 'pmessage' , ( _ , channel , message ) => {
2017-05-20 16:31:47 +01:00
const callbacks = subs [ channel ] ;
2017-02-07 13:37:12 +00:00
2017-05-20 16:31:47 +01:00
log . silly ( ` New message on channel ${ channel } ` ) ;
2017-02-07 13:37:12 +00:00
2017-04-17 03:32:30 +01:00
if ( ! callbacks ) {
2017-05-20 16:31:47 +01:00
return ;
2017-04-17 03:32:30 +01:00
}
2017-05-20 16:31:47 +01:00
callbacks . forEach ( callback => callback ( message ) ) ;
} ) ;
2017-02-07 13:37:12 +00:00
2017-05-20 16:31:47 +01:00
redisClient . psubscribe ( ` ${ redisPrefix } timeline:* ` ) ;
2017-02-07 13:37:12 +00:00
2017-04-17 03:32:30 +01:00
const subscribe = ( channel , callback ) => {
2017-05-20 16:31:47 +01:00
log . silly ( ` Adding listener for ${ channel } ` ) ;
subs [ channel ] = subs [ channel ] || [ ] ;
subs [ channel ] . push ( callback ) ;
} ;
2017-02-03 17:27:42 +00:00
2017-04-17 03:32:30 +01:00
const unsubscribe = ( channel , callback ) => {
2017-05-20 16:31:47 +01:00
log . silly ( ` Removing listener for ${ channel } ` ) ;
subs [ channel ] = subs [ channel ] . filter ( item => item !== callback ) ;
} ;
2017-02-03 17:27:42 +00:00
2017-04-17 03:32:30 +01:00
const allowCrossDomain = ( req , res , next ) => {
2017-05-20 16:31:47 +01:00
res . header ( 'Access-Control-Allow-Origin' , '*' ) ;
res . header ( 'Access-Control-Allow-Headers' , 'Authorization, Accept, Cache-Control' ) ;
res . header ( 'Access-Control-Allow-Methods' , 'GET, OPTIONS' ) ;
2017-02-05 22:37:25 +00:00
2017-05-20 16:31:47 +01:00
next ( ) ;
} ;
2017-02-05 22:37:25 +00:00
2017-04-17 03:32:30 +01:00
const setRequestId = ( req , res , next ) => {
2017-05-20 16:31:47 +01:00
req . requestId = uuid . v4 ( ) ;
res . header ( 'X-Request-Id' , req . requestId ) ;
2017-02-02 00:31:09 +00:00
2017-05-20 16:31:47 +01:00
next ( ) ;
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
const accountFromToken = ( token , req , next ) => {
pgPool . connect ( ( err , client , done ) => {
2017-02-02 00:31:09 +00:00
if ( err ) {
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-02-02 00:31:09 +00:00
}
2017-04-17 03:32:30 +01:00
client . query ( 'SELECT oauth_access_tokens.resource_owner_id, users.account_id FROM oauth_access_tokens INNER JOIN users ON oauth_access_tokens.resource_owner_id = users.id WHERE oauth_access_tokens.token = $1 LIMIT 1' , [ token ] , ( err , result ) => {
2017-05-20 16:31:47 +01:00
done ( ) ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
if ( err ) {
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
if ( result . rows . length === 0 ) {
2017-05-20 16:31:47 +01:00
err = new Error ( 'Invalid access token' ) ;
err . statusCode = 401 ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
req . accountId = result . rows [ 0 ] . account _id ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
next ( ) ;
} ) ;
} ) ;
} ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
const authenticationMiddleware = ( req , res , next ) => {
if ( req . method === 'OPTIONS' ) {
2017-05-20 16:31:47 +01:00
next ( ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
const authorization = req . get ( 'Authorization' ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
if ( ! authorization ) {
2017-05-20 16:31:47 +01:00
const err = new Error ( 'Missing access token' ) ;
err . statusCode = 401 ;
2017-02-02 00:31:09 +00:00
2017-05-20 16:31:47 +01:00
next ( err ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-02 16:10:59 +00:00
2017-05-20 16:31:47 +01:00
const token = authorization . replace ( /^Bearer / , '' ) ;
2017-02-02 12:56:14 +00:00
2017-05-20 16:31:47 +01:00
accountFromToken ( token , req , next ) ;
} ;
2017-02-05 02:19:04 +00:00
2017-04-17 03:32:30 +01:00
const errorMiddleware = ( err , req , res , next ) => {
2017-05-20 16:31:47 +01:00
log . error ( req . requestId , err ) ;
res . writeHead ( err . statusCode || 500 , { 'Content-Type' : 'application/json' } ) ;
res . end ( JSON . stringify ( { error : err . statusCode ? ` ${ err } ` : 'An unexpected error occurred' } ) ) ;
} ;
2017-02-05 02:19:04 +00:00
2017-04-17 03:32:30 +01:00
const placeholders = ( arr , shift = 0 ) => arr . map ( ( _ , i ) => ` $ ${ i + 1 + shift } ` ) . join ( ', ' ) ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
const streamFrom = ( id , req , output , attachCloseHandler , needsFiltering = false ) => {
2017-05-20 16:31:47 +01:00
log . verbose ( req . requestId , ` Starting stream from ${ id } for ${ req . accountId } ` ) ;
2017-04-17 03:32:30 +01:00
const listener = message => {
2017-05-20 16:31:47 +01:00
const { event , payload , queued _at } = JSON . parse ( message ) ;
2017-02-02 12:56:14 +00:00
2017-04-17 03:32:30 +01:00
const transmit = ( ) => {
2017-05-20 16:31:47 +01:00
const now = new Date ( ) . getTime ( ) ;
2017-04-17 03:32:30 +01:00
const delta = now - queued _at ;
2017-02-02 12:56:14 +00:00
2017-05-20 16:31:47 +01:00
log . silly ( req . requestId , ` Transmitting for ${ req . accountId } : ${ event } ${ payload } Delay: ${ delta } ms ` ) ;
output ( event , payload ) ;
} ;
2017-02-02 12:56:14 +00:00
2017-04-17 03:32:30 +01:00
// Only messages that may require filtering are statuses, since notifications
// are already personalized and deletes do not matter
if ( needsFiltering && event === 'update' ) {
pgPool . connect ( ( err , client , done ) => {
2017-02-02 12:56:14 +00:00
if ( err ) {
2017-05-20 16:31:47 +01:00
log . error ( err ) ;
return ;
2017-02-02 12:56:14 +00:00
}
2017-05-20 16:31:47 +01:00
const unpackedPayload = JSON . parse ( payload ) ;
const targetAccountIds = [ unpackedPayload . account . id ] . concat ( unpackedPayload . mentions . map ( item => item . id ) ) . concat ( unpackedPayload . reblog ? [ unpackedPayload . reblog . account . id ] : [ ] ) ;
const accountDomain = unpackedPayload . account . acct . split ( '@' ) [ 1 ] ;
2017-04-17 03:32:30 +01:00
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
const queries = [
client . query ( ` SELECT 1 FROM blocks WHERE account_id = $ 1 AND target_account_id IN ( ${ placeholders ( targetAccountIds , 1 ) } ) UNION SELECT 1 FROM mutes WHERE account_id = $ 1 AND target_account_id IN ( ${ placeholders ( targetAccountIds , 1 ) } ) ` , [ req . accountId ] . concat ( targetAccountIds ) ) ,
2017-05-20 16:31:47 +01:00
] ;
2017-04-17 03:32:30 +01:00
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
if ( accountDomain ) {
2017-05-20 16:31:47 +01:00
queries . push ( client . query ( 'SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2' , [ req . accountId , accountDomain ] ) ) ;
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
}
2017-02-02 12:56:14 +00:00
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
Promise . all ( queries ) . then ( values => {
2017-05-20 16:31:47 +01:00
done ( ) ;
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
if ( values [ 0 ] . rows . length > 0 || ( values . length > 1 && values [ 1 ] . rows . length > 0 ) ) {
2017-05-20 16:31:47 +01:00
return ;
2017-04-17 03:32:30 +01:00
}
2017-05-20 16:31:47 +01:00
transmit ( ) ;
Account domain blocks (#2381)
* Add <ostatus:conversation /> tag to Atom input/output
Only uses ref attribute (not href) because href would be
the alternate link that's always included also.
Creates new conversation for every non-reply status. Carries
over conversation for every reply. Keeps remote URIs verbatim,
generates local URIs on the fly like the rest of them.
* Conversation muting - prevents notifications that reference a conversation
(including replies, favourites, reblogs) from being created. API endpoints
/api/v1/statuses/:id/mute and /api/v1/statuses/:id/unmute
Currently no way to tell when a status/conversation is muted, so the web UI
only has a "disable notifications" button, doesn't work as a toggle
* Display "Dismiss notifications" on all statuses in notifications column, not just own
* Add "muted" as a boolean attribute on statuses JSON
For now always false on contained reblogs, since it's only relevant for
statuses returned from the notifications endpoint, which are not nested
Remove "Disable notifications" from detailed status view, since it's
only relevant in the notifications column
* Up max class length
* Remove pending test for conversation mute
* Add tests, clean up
* Rename to "mute conversation" and "unmute conversation"
* Raise validation error when trying to mute/unmute status without conversation
* Adding account domain blocks that filter notifications and public timelines
* Add tests for domain blocks in notifications, public timelines
Filter reblogs of blocked domains from home
* Add API for listing and creating account domain blocks
* API for creating/deleting domain blocks, tests for Status#ancestors
and Status#descendants, filter domain blocks from them
* Filter domains in streaming API
* Update account_domain_block_spec.rb
2017-05-19 00:14:30 +01:00
} ) . catch ( err => {
2017-05-20 16:31:47 +01:00
log . error ( err ) ;
} ) ;
} ) ;
2017-04-17 03:32:30 +01:00
} else {
2017-05-20 16:31:47 +01:00
transmit ( ) ;
2017-04-17 03:32:30 +01:00
}
2017-05-20 16:31:47 +01:00
} ;
2017-04-17 03:32:30 +01:00
2017-05-20 16:31:47 +01:00
subscribe ( ` ${ redisPrefix } ${ id } ` , listener ) ;
attachCloseHandler ( ` ${ redisPrefix } ${ id } ` , listener ) ;
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream output to HTTP
const streamToHttp = ( req , res ) => {
2017-05-20 16:31:47 +01:00
res . setHeader ( 'Content-Type' , 'text/event-stream' ) ;
res . setHeader ( 'Transfer-Encoding' , 'chunked' ) ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
const heartbeat = setInterval ( ( ) => res . write ( ':thump\n' ) , 15000 ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
req . on ( 'close' , ( ) => {
2017-05-20 16:31:47 +01:00
log . verbose ( req . requestId , ` Ending stream for ${ req . accountId } ` ) ;
clearInterval ( heartbeat ) ;
} ) ;
2017-02-02 14:20:31 +00:00
2017-04-17 03:32:30 +01:00
return ( event , payload ) => {
2017-05-20 16:31:47 +01:00
res . write ( ` event: ${ event } \n ` ) ;
res . write ( ` data: ${ payload } \n \n ` ) ;
} ;
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream end for HTTP
const streamHttpEnd = req => ( id , listener ) => {
req . on ( 'close' , ( ) => {
2017-05-20 16:31:47 +01:00
unsubscribe ( id , listener ) ;
} ) ;
} ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream output to WebSockets
const streamToWs = ( req , ws ) => {
2017-05-06 16:05:38 +01:00
const heartbeat = setInterval ( ( ) => {
// TODO: Can't add multiple listeners, due to the limitation of uws.
if ( ws . readyState !== ws . OPEN ) {
2017-05-20 16:31:47 +01:00
log . verbose ( req . requestId , ` Ending stream for ${ req . accountId } ` ) ;
clearInterval ( heartbeat ) ;
return ;
2017-05-06 16:05:38 +01:00
}
2017-02-07 13:37:12 +00:00
2017-05-20 16:31:47 +01:00
ws . ping ( ) ;
} , 15000 ) ;
2017-04-02 20:27:14 +01:00
2017-04-17 03:32:30 +01:00
return ( event , payload ) => {
if ( ws . readyState !== ws . OPEN ) {
2017-05-20 16:31:47 +01:00
log . error ( req . requestId , 'Tried writing to closed socket' ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
ws . send ( JSON . stringify ( { event , payload } ) ) ;
} ;
} ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
// Setup stream end for WebSockets
const streamWsEnd = ws => ( id , listener ) => {
ws . on ( 'close' , ( ) => {
2017-05-20 16:31:47 +01:00
unsubscribe ( id , listener ) ;
} ) ;
2017-04-02 20:27:14 +01:00
2017-04-17 03:32:30 +01:00
ws . on ( 'error' , e => {
2017-05-20 16:31:47 +01:00
unsubscribe ( id , listener ) ;
} ) ;
} ;
2017-02-03 23:34:31 +00:00
2017-05-20 16:31:47 +01:00
app . use ( setRequestId ) ;
app . use ( allowCrossDomain ) ;
app . use ( authenticationMiddleware ) ;
app . use ( errorMiddleware ) ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/user' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( ` timeline: ${ req . accountId } ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) ) ;
} ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/public' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( 'timeline:public' , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/public/local' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( 'timeline:public:local' , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-02-06 22:46:14 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/hashtag' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( ` timeline:hashtag: ${ req . query . tag } ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-02-02 12:56:14 +00:00
2017-04-17 03:32:30 +01:00
app . get ( '/api/v1/streaming/hashtag/local' , ( req , res ) => {
2017-05-20 16:31:47 +01:00
streamFrom ( ` timeline:hashtag: ${ req . query . tag } :local ` , req , streamToHttp ( req , res ) , streamHttpEnd ( req ) , true ) ;
} ) ;
2017-02-06 22:46:14 +00:00
2017-04-17 03:32:30 +01:00
wss . on ( 'connection' , ws => {
2017-05-20 16:31:47 +01:00
const location = url . parse ( ws . upgradeReq . url , true ) ;
const token = location . query . access _token ;
const req = { requestId : uuid . v4 ( ) } ;
2017-02-02 00:31:09 +00:00
2017-04-17 03:32:30 +01:00
accountFromToken ( token , req , err => {
if ( err ) {
2017-05-20 16:31:47 +01:00
log . error ( req . requestId , err ) ;
ws . close ( ) ;
return ;
2017-04-17 03:32:30 +01:00
}
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
switch ( location . query . stream ) {
case 'user' :
2017-05-20 16:31:47 +01:00
streamFrom ( ` timeline: ${ req . accountId } ` , req , streamToWs ( req , ws ) , streamWsEnd ( ws ) ) ;
2017-04-17 03:32:30 +01:00
break ;
case 'public' :
2017-05-20 16:31:47 +01:00
streamFrom ( 'timeline:public' , req , streamToWs ( req , ws ) , streamWsEnd ( ws ) , true ) ;
2017-04-17 03:32:30 +01:00
break ;
case 'public:local' :
2017-05-20 16:31:47 +01:00
streamFrom ( 'timeline:public:local' , req , streamToWs ( req , ws ) , streamWsEnd ( ws ) , true ) ;
2017-04-17 03:32:30 +01:00
break ;
case 'hashtag' :
2017-05-20 16:31:47 +01:00
streamFrom ( ` timeline:hashtag: ${ location . query . tag } ` , req , streamToWs ( req , ws ) , streamWsEnd ( ws ) , true ) ;
2017-04-17 03:32:30 +01:00
break ;
case 'hashtag:local' :
2017-05-20 16:31:47 +01:00
streamFrom ( ` timeline:hashtag: ${ location . query . tag } :local ` , req , streamToWs ( req , ws ) , streamWsEnd ( ws ) , true ) ;
2017-04-17 03:32:30 +01:00
break ;
default :
2017-05-20 16:31:47 +01:00
ws . close ( ) ;
2017-04-17 03:32:30 +01:00
}
2017-05-20 16:31:47 +01:00
} ) ;
} ) ;
2017-02-03 23:34:31 +00:00
2017-04-17 03:32:30 +01:00
server . listen ( process . env . PORT || 4000 , ( ) => {
2017-05-20 16:31:47 +01:00
log . level = process . env . LOG _LEVEL || 'verbose' ;
log . info ( ` Starting streaming API server worker on ${ server . address ( ) . address } : ${ server . address ( ) . port } ` ) ;
} ) ;
2017-04-21 18:24:31 +01:00
2017-05-20 16:31:47 +01:00
process . on ( 'SIGINT' , exit ) ;
process . on ( 'SIGTERM' , exit ) ;
process . on ( 'exit' , exit ) ;
2017-04-21 18:24:31 +01:00
function exit ( ) {
2017-05-20 16:31:47 +01:00
server . close ( ) ;
2017-04-21 18:24:31 +01:00
}
2017-04-17 03:32:30 +01:00
}