Searchindex fix when objects have the same name #9649

Merged: 4 commits, Sep 26, 2021
5 changes: 5 additions & 0 deletions CHANGES
@@ -7,6 +7,9 @@ Dependencies
Incompatible changes
--------------------

* #9649: ``searchindex.js``: the embedded data has changed format to allow
objects with the same name in different domains.

Deprecated
----------

@@ -24,6 +27,8 @@ Bugs fixed
* #9630: autosummary: Failed to build summary table if :confval:`primary_domain`
is not 'py'
* #9670: html: Fix download file with special characters
* #9649: HTML search: when objects have the same name but in different domains,
return all of them as result instead of just one.

Testing
--------
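For context on the CHANGES entry above (#9649), a minimal sketch of the searchindex.js format change, shown as Python literals. The module name, anchors, and index values below are made up for illustration; only the tuple shape matches the new get_objects() output in this PR.

# Old format: one dict per prefix, keyed by object name.
# Two domains defining the same name collide on the key, so only one entry survives.
objects_old = {
    'mymodule': {
        'Foo': (0, 0, 1, '#mymodule.Foo'),   # (docindex, objtype index, priority, anchor)
    }
}

# New format: one list per prefix; the name moves into the tuple itself,
# so any number of same-named objects from different domains can coexist.
objects_new = {
    'mymodule': [
        (0, 0, 1, '#mymodule.Foo', 'Foo'),   # e.g. an entry from the py domain
        (3, 1, 1, '#cpp-foo', 'Foo'),        # e.g. an entry from the cpp domain
    ]
}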
8 changes: 4 additions & 4 deletions sphinx/search/__init__.py
@@ -304,8 +304,8 @@ def dump(self, stream: IO, format: Any) -> None:
format.dump(self.freeze(), stream)

def get_objects(self, fn2index: Dict[str, int]
) -> Dict[str, Dict[str, Tuple[int, int, int, str]]]:
rv: Dict[str, Dict[str, Tuple[int, int, int, str]]] = {}
) -> Dict[str, List[Tuple[int, int, int, str, str]]]:
rv: Dict[str, List[Tuple[int, int, int, str, str]]] = {}
otypes = self._objtypes
onames = self._objnames
for domainname, domain in sorted(self.env.domains.items()):
@@ -318,7 +318,7 @@ def get_objects(self, fn2index: Dict[str, int]
fullname = html.escape(fullname)
dispname = html.escape(dispname)
prefix, _, name = dispname.rpartition('.')
pdict = rv.setdefault(prefix, {})
plist = rv.setdefault(prefix, [])
try:
typeindex = otypes[domainname, type]
except KeyError:
@@ -337,7 +337,7 @@ def get_objects(self, fn2index: Dict[str, int]
shortanchor = '-'
else:
shortanchor = anchor
pdict[name] = (fn2index[docname], typeindex, prio, shortanchor)
plist.append((fn2index[docname], typeindex, prio, shortanchor, name))
return rv

def get_terms(self, fn2index: Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
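To make the effect of the get_objects() change concrete, here is a minimal sketch (not the actual IndexBuilder code) of why switching the per-prefix container from a dict to a list preserves duplicates. The two domains and their entries below are hypothetical.

from typing import Dict, List, Tuple

Entry = Tuple[int, int, int, str, str]   # docindex, objtype index, priority, anchor, name

def collect(pairs) -> Dict[str, List[Entry]]:
    # Group (prefix, entry) pairs the way the new code does: setdefault a list, then append.
    rv: Dict[str, List[Entry]] = {}
    for prefix, entry in pairs:
        rv.setdefault(prefix, []).append(entry)   # append keeps every entry
    return rv

pairs = [
    ('mymodule', (0, 0, 1, '#mymodule.Foo', 'Foo')),   # say, from one domain
    ('mymodule', (3, 1, 1, '#cpp-foo', 'Foo')),        # say, from another domain
]

print(collect(pairs))
# {'mymodule': [(0, 0, 1, '#mymodule.Foo', 'Foo'), (3, 1, 1, '#cpp-foo', 'Foo')]}
# With the old dict keyed by name, the second assignment would have overwritten
# the first, so the search index could only ever hold one of the two objects.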
5 changes: 3 additions & 2 deletions sphinx/themes/basic/static/searchtools.js
@@ -328,7 +328,9 @@ var Search = {
var results = [];

for (var prefix in objects) {
for (var name in objects[prefix]) {
for (var iMatch = 0; iMatch != objects[prefix].length; ++iMatch) {
var match = objects[prefix][iMatch];
var name = match[4];
var fullname = (prefix ? prefix + '.' : '') + name;
var fullnameLower = fullname.toLowerCase()
if (fullnameLower.indexOf(object) > -1) {
@@ -342,7 +344,6 @@ var Search = {
} else if (parts[parts.length - 1].indexOf(object) > -1) {
score += Scorer.objPartialMatch;
}
var match = objects[prefix][name];
var objname = objnames[match[1]][2];
var title = titles[match[0]];
// If more than one term searched for, we require other words to be
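The searchtools.js change above walks the per-prefix list and reads the display name out of the tuple (match[4]) instead of iterating dict keys. As an illustration only, here is a rough Python rendering of that lookup; the shipped code is the JavaScript in searchtools.js and additionally applies Scorer-based ranking, which is omitted here.

def find_objects(objects, query):
    # Return every (fullname, match) whose full name contains the query string.
    query = query.lower()
    results = []
    for prefix, matches in objects.items():
        for match in matches:                # each match is a tuple ...
            name = match[4]                  # ... whose fifth element is the name
            fullname = (prefix + '.' if prefix else '') + name
            if query in fullname.lower():
                results.append((fullname, match))
    return results

objects = {
    'mymodule': [
        (0, 0, 1, '#mymodule.Foo', 'Foo'),
        (3, 1, 1, '#cpp-foo', 'Foo'),
    ]
}
print(find_objects(objects, 'foo'))   # both same-named entries are returned, not just one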
125 changes: 69 additions & 56 deletions tests/test_search.py
@@ -66,7 +66,11 @@ def is_registered_term(index, keyword):
def test_objects_are_escaped(app, status, warning):
app.builder.build_all()
index = jsload(app.outdir / 'searchindex.js')
assert 'n::Array&lt;T, d&gt;' in index.get('objects').get('') # n::Array<T,d> is escaped
for item in index.get('objects').get(''):
if item[-1] == 'n::Array&lt;T, d&gt;': # n::Array<T,d> is escaped
break
else:
assert False, index.get('objects').get('')


@pytest.mark.sphinx(testroot='search')
@@ -129,50 +133,58 @@ def test_term_in_raw_directive(app, status, warning):


def test_IndexBuilder():
domain = DummyDomain([('objname', 'objdispname', 'objtype', 'docname', '#anchor', 1),
('objname2', 'objdispname2', 'objtype2', 'docname2', '', -1)])
env = DummyEnvironment('1.0', {'dummy': domain})
domain1 = DummyDomain([('objname1', 'objdispname1', 'objtype1', 'docname1_1', '#anchor', 1),
('objname2', 'objdispname2', 'objtype2', 'docname1_2', '', -1)])
domain2 = DummyDomain([('objname1', 'objdispname1', 'objtype1', 'docname2_1', '#anchor', 1),
('objname2', 'objdispname2', 'objtype2', 'docname2_2', '', -1)])
env = DummyEnvironment('1.0', {'dummy1': domain1, 'dummy2': domain2})
doc = utils.new_document(b'test data', settings)
doc['file'] = 'dummy'
parser.parse(FILE_CONTENTS, doc)

# feed
index = IndexBuilder(env, 'en', {}, None)
index.feed('docname', 'filename', 'title', doc)
index.feed('docname2', 'filename2', 'title2', doc)
assert index._titles == {'docname': 'title', 'docname2': 'title2'}
assert index._filenames == {'docname': 'filename', 'docname2': 'filename2'}
index.feed('docname1_1', 'filename1_1', 'title1_1', doc)
index.feed('docname1_2', 'filename1_2', 'title1_2', doc)
index.feed('docname2_1', 'filename2_1', 'title2_1', doc)
index.feed('docname2_2', 'filename2_2', 'title2_2', doc)
assert index._titles == {'docname1_1': 'title1_1', 'docname1_2': 'title1_2',
'docname2_1': 'title2_1', 'docname2_2': 'title2_2'}
assert index._filenames == {'docname1_1': 'filename1_1', 'docname1_2': 'filename1_2',
'docname2_1': 'filename2_1', 'docname2_2': 'filename2_2'}
assert index._mapping == {
'ar': {'docname', 'docname2'},
'fermion': {'docname', 'docname2'},
'comment': {'docname', 'docname2'},
'non': {'docname', 'docname2'},
'index': {'docname', 'docname2'},
'test': {'docname', 'docname2'}
'ar': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'fermion': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'comment': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'non': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'index': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'test': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}
}
assert index._title_mapping == {'section_titl': {'docname', 'docname2'}}
assert index._title_mapping == {'section_titl': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}}
assert index._objtypes == {}
assert index._objnames == {}

# freeze
assert index.freeze() == {
'docnames': ('docname', 'docname2'),
'docnames': ('docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'),
'envversion': '1.0',
'filenames': ['filename', 'filename2'],
'objects': {'': {'objdispname': (0, 0, 1, '#anchor')}},
'objnames': {0: ('dummy', 'objtype', 'objtype')},
'objtypes': {0: 'dummy:objtype'},
'terms': {'ar': [0, 1],
'comment': [0, 1],
'fermion': [0, 1],
'index': [0, 1],
'non': [0, 1],
'test': [0, 1]},
'titles': ('title', 'title2'),
'titleterms': {'section_titl': [0, 1]}
'filenames': ['filename1_1', 'filename1_2', 'filename2_1', 'filename2_2'],
'objects': {'': [(0, 0, 1, '#anchor', 'objdispname1'),
(2, 1, 1, '#anchor', 'objdispname1')]},
'objnames': {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')},
'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'},
'terms': {'ar': [0, 1, 2, 3],
'comment': [0, 1, 2, 3],
'fermion': [0, 1, 2, 3],
'index': [0, 1, 2, 3],
'non': [0, 1, 2, 3],
'test': [0, 1, 2, 3]},
'titles': ('title1_1', 'title1_2', 'title2_1', 'title2_2'),
'titleterms': {'section_titl': [0, 1, 2, 3]}
}
assert index._objtypes == {('dummy', 'objtype'): 0}
assert index._objnames == {0: ('dummy', 'objtype', 'objtype')}
assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1}
assert index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'),
1: ('dummy2', 'objtype1', 'objtype1')}

# dump / load
stream = BytesIO()
@@ -195,40 +207,41 @@ def test_IndexBuilder():
assert index2._objnames == index._objnames

# prune
index.prune(['docname2'])
assert index._titles == {'docname2': 'title2'}
assert index._filenames == {'docname2': 'filename2'}
index.prune(['docname1_2', 'docname2_2'])
assert index._titles == {'docname1_2': 'title1_2', 'docname2_2': 'title2_2'}
assert index._filenames == {'docname1_2': 'filename1_2', 'docname2_2': 'filename2_2'}
assert index._mapping == {
'ar': {'docname2'},
'fermion': {'docname2'},
'comment': {'docname2'},
'non': {'docname2'},
'index': {'docname2'},
'test': {'docname2'}
'ar': {'docname1_2', 'docname2_2'},
'fermion': {'docname1_2', 'docname2_2'},
'comment': {'docname1_2', 'docname2_2'},
'non': {'docname1_2', 'docname2_2'},
'index': {'docname1_2', 'docname2_2'},
'test': {'docname1_2', 'docname2_2'}
}
assert index._title_mapping == {'section_titl': {'docname2'}}
assert index._objtypes == {('dummy', 'objtype'): 0}
assert index._objnames == {0: ('dummy', 'objtype', 'objtype')}
assert index._title_mapping == {'section_titl': {'docname1_2', 'docname2_2'}}
assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1}
assert index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')}

# freeze after prune
assert index.freeze() == {
'docnames': ('docname2',),
'docnames': ('docname1_2', 'docname2_2'),
'envversion': '1.0',
'filenames': ['filename2'],
'filenames': ['filename1_2', 'filename2_2'],
'objects': {},
'objnames': {0: ('dummy', 'objtype', 'objtype')},
'objtypes': {0: 'dummy:objtype'},
'terms': {'ar': 0,
'comment': 0,
'fermion': 0,
'index': 0,
'non': 0,
'test': 0},
'titles': ('title2',),
'titleterms': {'section_titl': 0}
'objnames': {0: ('dummy1', 'objtype1', 'objtype1'), 1: ('dummy2', 'objtype1', 'objtype1')},
'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'},
'terms': {'ar': [0, 1],
'comment': [0, 1],
'fermion': [0, 1],
'index': [0, 1],
'non': [0, 1],
'test': [0, 1]},
'titles': ('title1_2', 'title2_2'),
'titleterms': {'section_titl': [0, 1]}
}
assert index._objtypes == {('dummy', 'objtype'): 0}
assert index._objnames == {0: ('dummy', 'objtype', 'objtype')}
assert index._objtypes == {('dummy1', 'objtype1'): 0, ('dummy2', 'objtype1'): 1}
assert index._objnames == {0: ('dummy1', 'objtype1', 'objtype1'),
1: ('dummy2', 'objtype1', 'objtype1')}


def test_IndexBuilder_lookup():