Merge commit '6c4c72497a5722870e4432ef41dd4c9ec36a8928' into glitch-soc/merge-upstream
Conflicts:
- `.github/workflows/build-releases.yml`:
  Upstream changed comments close to a line we modified to account for
  different container image repositories. Updated the comments as upstream did.
Commit: ac2dae0d11
@@ -76,8 +76,6 @@ jobs:
         if: ${{ inputs.push_to_images != '' }}
         with:
           images: ${{ inputs.push_to_images }}
-          # Only tag with latest when ran against the latest stable branch
-          # This needs to be updated after each minor version release
           flavor: ${{ inputs.flavor }}
           tags: ${{ inputs.tags }}
           labels: ${{ inputs.labels }}
@@ -16,6 +16,8 @@ jobs:
       use_native_arm64_builder: false
       push_to_images: |
         ghcr.io/${{ github.repository_owner }}/mastodon
+      # Only tag with latest when ran against the latest stable branch
+      # This needs to be updated after each minor version release
       flavor: |
         latest=${{ startsWith(github.ref, 'refs/tags/v4.1.') }}
       tags: |
@@ -482,7 +482,7 @@ GEM
     nokogiri (1.15.4)
       mini_portile2 (~> 2.8.2)
       racc (~> 1.4)
-    oj (3.16.0)
+    oj (3.16.1)
     omniauth (2.1.1)
       hashie (>= 3.4.6)
       rack (>= 2.2.3)
@@ -519,7 +519,7 @@ GEM
     parslet (2.0.0)
     pastel (0.8.0)
       tty-color (~> 0.5)
-    pg (1.5.3)
+    pg (1.5.4)
     pghero (3.3.3)
       activerecord (>= 6)
     posix-spawn (0.3.15)
@@ -34,7 +34,7 @@ class AccountsIndex < Chewy::Index
        },

        verbatim: {
-         tokenizer: 'uax_url_email',
+         tokenizer: 'standard',
          filter: %w(lowercase asciifolding cjk_width),
        },

@@ -1,6 +1,7 @@
 # frozen_string_literal: true

 class Api::V1::Timelines::TagController < Api::BaseController
+  before_action -> { doorkeeper_authorize! :read, :'read:statuses' }, only: :show, if: :require_auth?
   before_action :load_tag
   after_action :insert_pagination_headers, unless: -> { @statuses.empty? }

@@ -12,6 +13,10 @@ class Api::V1::Timelines::TagController < Api::BaseController

   private

+  def require_auth?
+    !Setting.timeline_preview
+  end
+
   def load_tag
     @tag = Tag.find_normalized(params[:id])
   end

@@ -80,7 +80,7 @@ class Search extends PureComponent {

   handleKeyDown = (e) => {
     const { selectedOption } = this.state;
-    const options = this._getOptions().concat(this.defaultOptions);
+    const options = searchEnabled ? this._getOptions().concat(this.defaultOptions) : this._getOptions();

     switch(e.key) {
     case 'Escape':
@@ -353,15 +353,19 @@ class Search extends PureComponent {
             </>
           )}

-          <h4><FormattedMessage id='search_popout.options' defaultMessage='Search options' /></h4>
+          {searchEnabled && (
+            <>
+              <h4><FormattedMessage id='search_popout.options' defaultMessage='Search options' /></h4>

           <div className='search__popout__menu'>
             {this.defaultOptions.map(({ key, label, action }, i) => (
               <button key={key} onMouseDown={action} className={classNames('search__popout__menu__item', { selected: selectedOption === (options.length + i) })}>
                 {label}
               </button>
             ))}
           </div>
+            </>
+          )}
         </div>
       </div>
     );

@@ -31,6 +31,7 @@ const messages = defineMessages({
   about: { id: 'navigation_bar.about', defaultMessage: 'About' },
   search: { id: 'navigation_bar.search', defaultMessage: 'Search' },
   advancedInterface: { id: 'navigation_bar.advanced_interface', defaultMessage: 'Open in advanced web interface' },
+  openedInClassicInterface: { id: 'navigation_bar.opened_in_classic_interface', defaultMessage: 'Posts, accounts, and other specific pages are opened by default in the classic web interface.' },
 });

 class NavigationPanel extends Component {

@@ -57,12 +58,17 @@ class NavigationPanel extends Component {
       <div className='navigation-panel__logo'>
         <Link to='/' className='column-link column-link--logo'><WordmarkLogo /></Link>

-        {transientSingleColumn && (
-          <a href={`/deck${location.pathname}`} className='button button--block'>
-            {intl.formatMessage(messages.advancedInterface)}
-          </a>
+        {transientSingleColumn ? (
+          <div class='switch-to-advanced'>
+            {intl.formatMessage(messages.openedInClassicInterface)}
+            {" "}
+            <a href={`/deck${location.pathname}`} class='switch-to-advanced__toggle'>
+              {intl.formatMessage(messages.advancedInterface)}
+            </a>
+          </div>
+        ) : (
+          <hr />
         )}
-        <hr />
       </div>

       {signedIn && (
@@ -411,6 +411,7 @@
   "navigation_bar.lists": "Lists",
   "navigation_bar.logout": "Logout",
   "navigation_bar.mutes": "Muted users",
+  "navigation_bar.opened_in_classic_interface": "Posts, accounts, and other specific pages are opened by default in the classic web interface.",
   "navigation_bar.personal": "Personal",
   "navigation_bar.pins": "Pinned posts",
   "navigation_bar.preferences": "Preferences",
@@ -409,6 +409,7 @@
   "navigation_bar.lists": "Listes",
   "navigation_bar.logout": "Déconnexion",
   "navigation_bar.mutes": "Comptes masqués",
+  "navigation_bar.opened_in_classic_interface": "Les messages, les comptes et les pages spécifiques sont ouvertes dans l’interface classique.",
   "navigation_bar.personal": "Personnel",
   "navigation_bar.pins": "Messages épinglés",
   "navigation_bar.preferences": "Préférences",
@@ -2381,6 +2381,7 @@ $ui-header-height: 55px;

 .filter-form {
   display: flex;
+  flex-wrap: wrap;
 }

 .autosuggest-textarea__textarea {
@@ -3270,6 +3271,22 @@ $ui-header-height: 55px;
     border-color: $ui-highlight-color;
   }

+  .switch-to-advanced {
+    color: $classic-primary-color;
+    background-color: $classic-base-color;
+    padding: 15px;
+    border-radius: 4px;
+    margin-top: 4px;
+    margin-bottom: 12px;
+    font-size: 13px;
+    line-height: 18px;
+
+    .switch-to-advanced__toggle {
+      color: $ui-button-tertiary-color;
+      font-weight: bold;
+    }
+  }
+
   .column-link {
     background: lighten($ui-base-color, 8%);
     color: $primary-text-color;
@@ -4,10 +4,10 @@ class Importer::AccountsIndexImporter < Importer::BaseImporter
   def import!
     scope.includes(:account_stat).find_in_batches(batch_size: @batch_size) do |tmp|
       in_work_unit(tmp) do |accounts|
-        bulk = Chewy::Index::Import::BulkBuilder.new(index, to_index: accounts).bulk_body
+        bulk = build_bulk_body(accounts)

-        indexed = bulk.count { |entry| entry[:index] }
-        deleted = bulk.count { |entry| entry[:delete] }
+        indexed = bulk.size
+        deleted = 0

         Chewy::Index::Import::BulkRequest.new(index).perform(bulk)

@@ -68,6 +68,14 @@ class Importer::BaseImporter

   protected

+  def build_bulk_body(to_import)
+    # Specialize `Chewy::Index::Import::BulkBuilder#bulk_body` to avoid a few
+    # inefficiencies, as none of our fields or join fields and we do not need
+    # `BulkBuilder`'s versatility.
+    crutches = Chewy::Index::Crutch::Crutches.new index, to_import
+    to_import.map { |object| { index: { _id: object.id, data: index.compose(object, crutches, fields: []) } } }
+  end
+
   def in_work_unit(...)
     work_unit = Concurrent::Promises.future_on(@executor, ...)

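For orientation, a minimal sketch (not taken from the commit) of the bulk body shape the new build_bulk_body helper produces, and why the importers above can now count indexed documents with bulk.size:

    # Sketch only: every entry emitted by build_bulk_body is a plain index action,
    # so "indexed" is simply the number of entries and nothing is deleted here.
    bulk = [
      { index: { _id: 1, data: { 'text' => 'hello world' } } },
      { index: { _id: 2, data: { 'text' => 'another document' } } },
    ]
    indexed = bulk.size # => 2
    deleted = 0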
@@ -4,10 +4,10 @@ class Importer::InstancesIndexImporter < Importer::BaseImporter
   def import!
     index.adapter.default_scope.find_in_batches(batch_size: @batch_size) do |tmp|
       in_work_unit(tmp) do |instances|
-        bulk = Chewy::Index::Import::BulkBuilder.new(index, to_index: instances).bulk_body
+        bulk = build_bulk_body(instances)

-        indexed = bulk.count { |entry| entry[:index] }
-        deleted = bulk.count { |entry| entry[:delete] }
+        indexed = bulk.size
+        deleted = 0

         Chewy::Index::Import::BulkRequest.new(index).perform(bulk)

@@ -5,11 +5,11 @@ class Importer::PublicStatusesIndexImporter < Importer::BaseImporter
     scope.select(:id).find_in_batches(batch_size: @batch_size) do |batch|
       in_work_unit(batch.pluck(:id)) do |status_ids|
         bulk = ActiveRecord::Base.connection_pool.with_connection do
-          Chewy::Index::Import::BulkBuilder.new(index, to_index: Status.includes(:media_attachments, :preloadable_poll, :preview_cards).where(id: status_ids)).bulk_body
+          build_bulk_body(index.adapter.default_scope.where(id: status_ids))
         end

-        indexed = bulk.count { |entry| entry[:index] }
-        deleted = bulk.count { |entry| entry[:delete] }
+        indexed = bulk.size
+        deleted = 0

         Chewy::Index::Import::BulkRequest.new(index).perform(bulk)

@@ -13,32 +13,25 @@ class Importer::StatusesIndexImporter < Importer::BaseImporter

     scope.find_in_batches(batch_size: @batch_size) do |tmp|
       in_work_unit(tmp.map(&:status_id)) do |status_ids|
-        bulk = ActiveRecord::Base.connection_pool.with_connection do
-          Chewy::Index::Import::BulkBuilder.new(index, to_index: index.adapter.default_scope.where(id: status_ids)).bulk_body
-        end
-
-        indexed = 0
         deleted = 0

-        # We can't use the delete_if proc to do the filtering because delete_if
-        # is called before rendering the data and we need to filter based
-        # on the results of the filter, so this filtering happens here instead
-        bulk.map! do |entry|
-          new_entry = if entry[:index] && entry.dig(:index, :data, 'searchable_by').blank?
-                        { delete: entry[:index].except(:data) }
-                      else
-                        entry
-                      end
-          if new_entry[:index]
-            indexed += 1
-          else
-            deleted += 1
+        bulk = ActiveRecord::Base.connection_pool.with_connection do
+          to_index = index.adapter.default_scope.where(id: status_ids)
+          crutches = Chewy::Index::Crutch::Crutches.new index, to_index
+          to_index.map do |object|
+            # This is unlikely to happen, but the post may have been
+            # un-interacted with since it was queued for indexing
+            if object.searchable_by.empty?
+              deleted += 1
+              { delete: { _id: object.id } }
+            else
+              { index: { _id: object.id, data: index.compose(object, crutches, fields: []) } }
+            end
           end
-
-          new_entry
         end

+        indexed = bulk.size - deleted
+
         Chewy::Index::Import::BulkRequest.new(index).perform(bulk)

         [indexed, deleted]

@@ -4,10 +4,10 @@ class Importer::TagsIndexImporter < Importer::BaseImporter
   def import!
     index.adapter.default_scope.find_in_batches(batch_size: @batch_size) do |tmp|
      in_work_unit(tmp) do |tags|
-        bulk = Chewy::Index::Import::BulkBuilder.new(index, to_index: tags).bulk_body
+        bulk = build_bulk_body(tags)

-        indexed = bulk.count { |entry| entry[:index] }
-        deleted = bulk.count { |entry| entry[:delete] }
+        indexed = bulk.size
+        deleted = 0

         Chewy::Index::Import::BulkRequest.new(index).perform(bulk)

@@ -6,10 +6,10 @@ class SearchQueryParser < Parslet::Parser
   rule(:colon) { str(':') }
   rule(:space) { match('\s').repeat(1) }
   rule(:operator) { (str('+') | str('-')).as(:operator) }
-  rule(:prefix) { (term >> colon).as(:prefix) }
+  rule(:prefix) { term >> colon }
   rule(:shortcode) { (colon >> term >> colon.maybe).as(:shortcode) }
   rule(:phrase) { (quote >> (term >> space.maybe).repeat >> quote).as(:phrase) }
-  rule(:clause) { (operator.maybe >> prefix.maybe >> (phrase | term | shortcode)).as(:clause) }
+  rule(:clause) { (operator.maybe >> prefix.maybe.as(:prefix) >> (phrase | term | shortcode)).as(:clause) | prefix.as(:clause) | quote.as(:junk) }
   rule(:query) { (clause >> space.maybe).repeat.as(:query) }
   root(:query)
 end

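A small illustration of what the reworked grammar now tolerates, based on the parser spec added later in this commit (the inputs are examples only):

    # Inputs that previously failed to parse are now consumed:
    parser = SearchQueryParser.new
    parser.parse('foo:bar hello')   # prefixed clause plus a plain term
    parser.parse('foo:')            # a bare prefix is now a clause of its own
    parser.parse('"')               # a stray quote parses as :junk and is dropped by the transformer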
@@ -1,50 +1,32 @@
 # frozen_string_literal: true

 class SearchQueryTransformer < Parslet::Transform
+  SUPPORTED_PREFIXES = %w(
+    has
+    is
+    language
+    from
+    before
+    after
+    during
+  ).freeze
+
   class Query
-    attr_reader :should_clauses, :must_not_clauses, :must_clauses, :filter_clauses
+    attr_reader :must_not_clauses, :must_clauses, :filter_clauses

     def initialize(clauses)
-      grouped = clauses.chunk(&:operator).to_h
-      @should_clauses = grouped.fetch(:should, [])
+      grouped = clauses.compact.chunk(&:operator).to_h
       @must_not_clauses = grouped.fetch(:must_not, [])
       @must_clauses = grouped.fetch(:must, [])
       @filter_clauses = grouped.fetch(:filter, [])
     end

     def apply(search)
-      should_clauses.each { |clause| search = search.query.should(clause_to_query(clause)) }
-      must_clauses.each { |clause| search = search.query.must(clause_to_query(clause)) }
-      must_not_clauses.each { |clause| search = search.query.must_not(clause_to_query(clause)) }
-      filter_clauses.each { |clause| search = search.filter(**clause_to_filter(clause)) }
+      must_clauses.each { |clause| search = search.query.must(clause.to_query) }
+      must_not_clauses.each { |clause| search = search.query.must_not(clause.to_query) }
+      filter_clauses.each { |clause| search = search.filter(**clause.to_query) }
       search.query.minimum_should_match(1)
     end
-
-    private
-
-    def clause_to_query(clause)
-      case clause
-      when TermClause
-        { multi_match: { type: 'most_fields', query: clause.term, fields: ['text', 'text.stemmed'] } }
-      when PhraseClause
-        { match_phrase: { text: { query: clause.phrase } } }
-      else
-        raise "Unexpected clause type: #{clause}"
-      end
-    end
-
-    def clause_to_filter(clause)
-      case clause
-      when PrefixClause
-        if clause.negated?
-          { bool: { must_not: { clause.type => { clause.filter => clause.term } } } }
-        else
-          { clause.type => { clause.filter => clause.term } }
-        end
-      else
-        raise "Unexpected clause type: #{clause}"
-      end
-    end
   end

   class Operator
@ -63,31 +45,38 @@ class SearchQueryTransformer < Parslet::Transform
|
||||||
end
|
end
|
||||||
|
|
||||||
class TermClause
|
class TermClause
|
||||||
attr_reader :prefix, :operator, :term
|
attr_reader :operator, :term
|
||||||
|
|
||||||
def initialize(prefix, operator, term)
|
def initialize(operator, term)
|
||||||
@prefix = prefix
|
|
||||||
@operator = Operator.symbol(operator)
|
@operator = Operator.symbol(operator)
|
||||||
@term = term
|
@term = term
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def to_query
|
||||||
|
{ multi_match: { type: 'most_fields', query: @term, fields: ['text', 'text.stemmed'], operator: 'and' } }
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
class PhraseClause
|
class PhraseClause
|
||||||
attr_reader :prefix, :operator, :phrase
|
attr_reader :operator, :phrase
|
||||||
|
|
||||||
def initialize(prefix, operator, phrase)
|
def initialize(operator, phrase)
|
||||||
@prefix = prefix
|
|
||||||
@operator = Operator.symbol(operator)
|
@operator = Operator.symbol(operator)
|
||||||
@phrase = phrase
|
@phrase = phrase
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def to_query
|
||||||
|
{ match_phrase: { text: { query: @phrase } } }
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
class PrefixClause
|
class PrefixClause
|
||||||
attr_reader :type, :filter, :operator, :term
|
attr_reader :operator, :prefix, :term
|
||||||
|
|
||||||
def initialize(prefix, operator, term, options = {})
|
def initialize(prefix, operator, term, options = {})
|
||||||
@negated = operator == '-'
|
@prefix = prefix
|
||||||
@options = options
|
@negated = operator == '-'
|
||||||
|
@options = options
|
||||||
@operator = :filter
|
@operator = :filter
|
||||||
|
|
||||||
case prefix
|
case prefix
|
||||||
|
@@ -116,12 +105,16 @@ class SearchQueryTransformer < Parslet::Transform
         @type = :range
         @term = { gte: term, lte: term, time_zone: @options[:current_account]&.user_time_zone || 'UTC' }
       else
-        raise Mastodon::SyntaxError
+        raise "Unknown prefix: #{prefix}"
       end
     end

-    def negated?
-      @negated
+    def to_query
+      if @negated
+        { bool: { must_not: { @type => { @filter => @term } } } }
+      else
+        { @type => { @filter => @term } }
+      end
     end

     private
@@ -159,18 +152,26 @@ class SearchQueryTransformer < Parslet::Transform
     prefix = clause[:prefix][:term].to_s if clause[:prefix]
     operator = clause[:operator]&.to_s

-    if clause[:prefix]
+    if clause[:prefix] && SUPPORTED_PREFIXES.include?(prefix)
       PrefixClause.new(prefix, operator, clause[:term].to_s, current_account: current_account)
+    elsif clause[:prefix]
+      TermClause.new(operator, "#{prefix} #{clause[:term]}")
     elsif clause[:term]
-      TermClause.new(prefix, operator, clause[:term].to_s)
+      TermClause.new(operator, clause[:term].to_s)
     elsif clause[:shortcode]
-      TermClause.new(prefix, operator, ":#{clause[:term]}:")
+      TermClause.new(operator, ":#{clause[:term]}:")
     elsif clause[:phrase]
-      PhraseClause.new(prefix, operator, clause[:phrase].is_a?(Array) ? clause[:phrase].map { |p| p[:term].to_s }.join(' ') : clause[:phrase].to_s)
+      PhraseClause.new(operator, clause[:phrase].is_a?(Array) ? clause[:phrase].map { |p| p[:term].to_s }.join(' ') : clause[:phrase].to_s)
     else
       raise "Unexpected clause type: #{clause}"
     end
   end

-  rule(query: sequence(:clauses)) { Query.new(clauses) }
+  rule(junk: subtree(:junk)) do
+    nil
+  end
+
+  rule(query: sequence(:clauses)) do
+    Query.new(clauses)
+  end
 end

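A sketch of the transformer's new behaviour, mirroring the transformer spec added later in this commit (the query string and expected values come from that spec):

    query = SearchQueryTransformer.new.apply(
      SearchQueryParser.new.parse('hello is:reply'),
      current_account: nil
    )
    query.must_clauses.map(&:term)   # => ["hello"]
    query.filter_clauses.map(&:term) # => ["reply"]
    query.must_not_clauses           # => [] (empty)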
|
@ -100,6 +100,8 @@ class MediaAttachment < ApplicationRecord
|
||||||
output: {
|
output: {
|
||||||
'loglevel' => 'fatal',
|
'loglevel' => 'fatal',
|
||||||
'preset' => 'veryfast',
|
'preset' => 'veryfast',
|
||||||
|
'movflags' => 'faststart', # Move metadata to start of file so playback can begin before download finishes
|
||||||
|
'pix_fmt' => 'yuv420p', # Ensure color space for cross-browser compatibility
|
||||||
'c:v' => 'h264',
|
'c:v' => 'h264',
|
||||||
'c:a' => 'aac',
|
'c:a' => 'aac',
|
||||||
'b:a' => '192k',
|
'b:a' => '192k',
|
||||||
|
|
|
@@ -18,18 +18,31 @@ class WebfingerSerializer < ActiveModel::Serializer
   end

   def links
-    if object.instance_actor?
-      [
-        { rel: 'http://webfinger.net/rel/profile-page', type: 'text/html', href: about_more_url(instance_actor: true) },
-        { rel: 'self', type: 'application/activity+json', href: instance_actor_url },
-        { rel: 'http://ostatus.org/schema/1.0/subscribe', template: "#{authorize_interaction_url}?uri={uri}" },
-      ]
-    else
-      [
-        { rel: 'http://webfinger.net/rel/profile-page', type: 'text/html', href: short_account_url(object) },
-        { rel: 'self', type: 'application/activity+json', href: account_url(object) },
-        { rel: 'http://ostatus.org/schema/1.0/subscribe', template: "#{authorize_interaction_url}?uri={uri}" },
-      ]
+    [
+      { rel: 'http://webfinger.net/rel/profile-page', type: 'text/html', href: profile_page_href },
+      { rel: 'self', type: 'application/activity+json', href: self_href },
+      { rel: 'http://ostatus.org/schema/1.0/subscribe', template: "#{authorize_interaction_url}?uri={uri}" },
+    ].tap do |x|
+      x << { rel: 'http://webfinger.net/rel/avatar', type: object.avatar.content_type, href: full_asset_url(object.avatar_original_url) } if show_avatar?
     end
   end
+
+  private
+
+  def show_avatar?
+    media_present = object.avatar.present? && object.avatar.content_type.present?
+
+    # Show avatar only if an instance shows profiles to logged out users
+    allowed_by_config = ENV['DISALLOW_UNAUTHENTICATED_API_ACCESS'] != 'true' && !Rails.configuration.x.limited_federation_mode
+
+    media_present && allowed_by_config
+  end
+
+  def profile_page_href
+    object.instance_actor? ? about_more_url(instance_actor: true) : short_account_url(object)
+  end
+
+  def self_href
+    object.instance_actor? ? instance_actor_url : account_url(object)
+  end
 end

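For reference, a hypothetical shape of the links array this serializer now emits when show_avatar? is true; every hostname, path, and content type below is invented, only the rel/type/href keys come from the code above:

    links = [
      { rel: 'http://webfinger.net/rel/profile-page', type: 'text/html', href: 'https://example.com/@alice' },
      { rel: 'self', type: 'application/activity+json', href: 'https://example.com/users/alice' },
      { rel: 'http://ostatus.org/schema/1.0/subscribe', template: 'https://example.com/authorize_interaction?uri={uri}' },
      { rel: 'http://webfinger.net/rel/avatar', type: 'image/jpeg', href: 'https://example.com/avatars/original/alice.jpg' },
    ]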
@@ -1,8 +1,10 @@
 # frozen_string_literal: true

 class SearchService < BaseService
+  QUOTE_EQUIVALENT_CHARACTERS = /[“”„«»「」『』《》]/
+
   def call(query, account, limit, options = {})
-    @query = query&.strip
+    @query = query&.strip&.gsub(QUOTE_EQUIVALENT_CHARACTERS, '"')
     @account = account
     @options = options
     @limit = limit.to_i
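A quick plain-Ruby check of the quote normalization introduced above (the sample string is arbitrary):

    QUOTE_EQUIVALENT_CHARACTERS = /[“”„«»「」『』《》]/
    '«hello world» and 「foo」'.gsub(QUOTE_EQUIVALENT_CHARACTERS, '"')
    # => "\"hello world\" and \"foo\""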
@@ -16,9 +16,7 @@ class Scheduler::IndexingScheduler
     indexes.each do |type|
       with_redis do |redis|
         redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
-          with_read_replica do
-            type.import!(ids)
-          end
+          type.import!(ids)

           redis.srem("chewy:queue:#{type.name}", ids)
         end

@@ -15,10 +15,22 @@ class AddUniqueIndexOnPreviewCardsStatuses < ActiveRecord::Migration[6.1]

   private

+  def supports_concurrent_reindex?
+    @supports_concurrent_reindex ||= begin
+      version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i
+      version >= 12_000
+    end
+  end
+
   def deduplicate_and_reindex!
     deduplicate_preview_cards!

-    safety_assured { execute 'REINDEX INDEX CONCURRENTLY preview_cards_statuses_pkey' }
+    if supports_concurrent_reindex?
+      safety_assured { execute 'REINDEX INDEX CONCURRENTLY preview_cards_statuses_pkey' }
+    else
+      remove_index :preview_cards_statuses, name: :preview_cards_statuses_pkey
+      add_index :preview_cards_statuses, [:status_id, :preview_card_id], name: :preview_cards_statuses_pkey, algorithm: :concurrently, unique: true
+    end
   rescue ActiveRecord::RecordNotUnique
     retry
   end

@@ -5,36 +5,66 @@ require 'rails_helper'
 describe Api::V1::Timelines::TagController do
   render_views

   let(:user) { Fabricate(:user) }
+  let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }

   before do
     allow(controller).to receive(:doorkeeper_token) { token }
   end

-  context 'with a user context' do
-    let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
+  describe 'GET #show' do
+    subject do
+      get :show, params: { id: 'test' }
+    end

-    describe 'GET #show' do
-      before do
-        PostStatusService.new.call(user.account, text: 'It is a #test')
+    before do
+      PostStatusService.new.call(user.account, text: 'It is a #test')
+    end
+
+    context 'when the instance allows public preview' do
+      context 'when the user is not authenticated' do
+        let(:token) { nil }
+
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
       end

-      it 'returns http success' do
-        get :show, params: { id: 'test' }
-        expect(response).to have_http_status(200)
-        expect(response.headers['Link'].links.size).to eq(2)
+      context 'when the user is authenticated' do
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
       end
     end
-  end

-  context 'without a user context' do
-    let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+    context 'when the instance does not allow public preview' do
+      before do
+        Form::AdminSettings.new(timeline_preview: false).save
+      end

-    describe 'GET #show' do
-      it 'returns http success' do
-        get :show, params: { id: 'test' }
-        expect(response).to have_http_status(200)
-        expect(response.headers['Link']).to be_nil
+      context 'when the user is not authenticated' do
+        let(:token) { nil }
+
+        it 'returns http unauthorized' do
+          subject
+
+          expect(response).to have_http_status(401)
+        end
+      end
+
+      context 'when the user is authenticated' do
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
       end
     end
   end
 end

@@ -3,6 +3,8 @@
 require 'rails_helper'

 describe WellKnown::WebfingerController do
+  include RoutingHelper
+
   render_views

   describe 'GET #show' do
@@ -167,5 +169,67 @@ describe WellKnown::WebfingerController do
         expect(response).to have_http_status(400)
       end
     end
+
+    context 'when an account has an avatar' do
+      let(:alice) { Fabricate(:account, username: 'alice', avatar: attachment_fixture('attachment.jpg')) }
+      let(:resource) { alice.to_webfinger_s }
+
+      it 'returns avatar in response' do
+        perform_show!
+
+        avatar_link = get_avatar_link(body_as_json)
+        expect(avatar_link).to_not be_nil
+        expect(avatar_link[:type]).to eq alice.avatar.content_type
+        expect(avatar_link[:href]).to eq full_asset_url(alice.avatar)
+      end
+
+      context 'with limited federation mode' do
+        before do
+          allow(Rails.configuration.x).to receive(:limited_federation_mode).and_return(true)
+        end
+
+        it 'does not return avatar in response' do
+          perform_show!
+
+          avatar_link = get_avatar_link(body_as_json)
+          expect(avatar_link).to be_nil
+        end
+      end
+
+      context 'when enabling DISALLOW_UNAUTHENTICATED_API_ACCESS' do
+        around do |example|
+          ClimateControl.modify DISALLOW_UNAUTHENTICATED_API_ACCESS: 'true' do
+            example.run
+          end
+        end
+
+        it 'does not return avatar in response' do
+          perform_show!
+
+          avatar_link = get_avatar_link(body_as_json)
+          expect(avatar_link).to be_nil
+        end
+      end
+    end
+
+    context 'when an account does not have an avatar' do
+      let(:alice) { Fabricate(:account, username: 'alice', avatar: nil) }
+      let(:resource) { alice.to_webfinger_s }
+
+      before do
+        perform_show!
+      end
+
+      it 'does not return avatar in response' do
+        avatar_link = get_avatar_link(body_as_json)
+        expect(avatar_link).to be_nil
+      end
+    end
+  end
+
+  private
+
+  def get_avatar_link(json)
+    json[:links].find { |link| link[:rel] == 'http://webfinger.net/rel/avatar' }
   end
 end

@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+require 'parslet/rig/rspec'
+
+describe SearchQueryParser do
+  let(:parser) { described_class.new }
+
+  context 'with term' do
+    it 'consumes "hello"' do
+      expect(parser.term).to parse('hello')
+    end
+  end
+
+  context 'with prefix' do
+    it 'consumes "foo:"' do
+      expect(parser.prefix).to parse('foo:')
+    end
+  end
+
+  context 'with operator' do
+    it 'consumes "+"' do
+      expect(parser.operator).to parse('+')
+    end
+
+    it 'consumes "-"' do
+      expect(parser.operator).to parse('-')
+    end
+  end
+
+  context 'with shortcode' do
+    it 'consumes ":foo:"' do
+      expect(parser.shortcode).to parse(':foo:')
+    end
+  end
+
+  context 'with phrase' do
+    it 'consumes "hello world"' do
+      expect(parser.phrase).to parse('"hello world"')
+    end
+  end
+
+  context 'with clause' do
+    it 'consumes "foo"' do
+      expect(parser.clause).to parse('foo')
+    end
+
+    it 'consumes "-foo"' do
+      expect(parser.clause).to parse('-foo')
+    end
+
+    it 'consumes "foo:bar"' do
+      expect(parser.clause).to parse('foo:bar')
+    end
+
+    it 'consumes "-foo:bar"' do
+      expect(parser.clause).to parse('-foo:bar')
+    end
+
+    it 'consumes \'foo:"hello world"\'' do
+      expect(parser.clause).to parse('foo:"hello world"')
+    end
+
+    it 'consumes \'-foo:"hello world"\'' do
+      expect(parser.clause).to parse('-foo:"hello world"')
+    end
+
+    it 'consumes "foo:"' do
+      expect(parser.clause).to parse('foo:')
+    end
+
+    it 'consumes \'"\'' do
+      expect(parser.clause).to parse('"')
+    end
+  end
+
+  context 'with query' do
+    it 'consumes "hello -world"' do
+      expect(parser.query).to parse('hello -world')
+    end
+
+    it 'consumes \'foo "hello world"\'' do
+      expect(parser.query).to parse('foo "hello world"')
+    end
+
+    it 'consumes "foo:bar hello"' do
+      expect(parser.query).to parse('foo:bar hello')
+    end
+
+    it 'consumes \'"hello" world "\'' do
+      expect(parser.query).to parse('"hello" world "')
+    end
+
+    it 'consumes "foo:bar bar: hello"' do
+      expect(parser.query).to parse('foo:bar bar: hello')
+    end
+  end
+end

@@ -3,16 +3,57 @@
 require 'rails_helper'

 describe SearchQueryTransformer do
-  describe 'initialization' do
-    let(:parser) { SearchQueryParser.new.parse('query') }
+  subject { described_class.new.apply(parser, current_account: nil) }

-    it 'sets attributes' do
-      transformer = described_class.new.apply(parser)
+  let(:parser) { SearchQueryParser.new.parse(query) }

-      expect(transformer.should_clauses.first).to be_nil
-      expect(transformer.must_clauses.first).to be_a(SearchQueryTransformer::TermClause)
-      expect(transformer.must_not_clauses.first).to be_nil
-      expect(transformer.filter_clauses.first).to be_nil
+  context 'with "hello world"' do
+    let(:query) { 'hello world' }
+
+    it 'transforms clauses' do
+      expect(subject.must_clauses.map(&:term)).to match_array %w(hello world)
+      expect(subject.must_not_clauses).to be_empty
+      expect(subject.filter_clauses).to be_empty
+    end
+  end
+
+  context 'with "hello -world"' do
+    let(:query) { 'hello -world' }
+
+    it 'transforms clauses' do
+      expect(subject.must_clauses.map(&:term)).to match_array %w(hello)
+      expect(subject.must_not_clauses.map(&:term)).to match_array %w(world)
+      expect(subject.filter_clauses).to be_empty
+    end
+  end
+
+  context 'with "hello is:reply"' do
+    let(:query) { 'hello is:reply' }
+
+    it 'transforms clauses' do
+      expect(subject.must_clauses.map(&:term)).to match_array %w(hello)
+      expect(subject.must_not_clauses).to be_empty
+      expect(subject.filter_clauses.map(&:term)).to match_array %w(reply)
+    end
+  end
+
+  context 'with "foo: bar"' do
+    let(:query) { 'foo: bar' }
+
+    it 'transforms clauses' do
+      expect(subject.must_clauses.map(&:term)).to match_array %w(foo bar)
+      expect(subject.must_not_clauses).to be_empty
+      expect(subject.filter_clauses).to be_empty
+    end
+  end
+
+  context 'with "foo:bar"' do
+    let(:query) { 'foo:bar' }
+
+    it 'transforms clauses' do
+      expect(subject.must_clauses.map(&:term)).to contain_exactly('foo bar')
+      expect(subject.must_not_clauses).to be_empty
+      expect(subject.filter_clauses).to be_empty
     end
   end
 end