fix side-effects of merge, and deprecation warnings

Fabien C. Y. Benureau 2020-05-10 09:53:04 +09:00
parent def25609a1
commit c9aa8ddd41
6 changed files with 18 additions and 18 deletions

View File

@@ -8,7 +8,7 @@

 ### Implemented enhancements

 - Added support for non-standard bibtex types, e.g. @collection, @software, etc. ([#226](https://github.com/pubs/pubs/pull/226))
-- The number of displayed authors in listings is now configurable, as the `n_authors` value in the `main` section of the configuration. ([#225](https://github.com/pubs/pubs/pull/225))
+- The number of displayed authors in listings is now configurable, as the `max_authors` value in the `main` section of the configuration. ([#225](https://github.com/pubs/pubs/pull/225))

 ### Fixed bugs
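
The renamed option is read from the `main` section of the pubs configuration as `conf['main']['max_authors']` (see the change further below). As a purely illustrative sketch, not the pubs implementation, this is the kind of truncation such a cap controls:

```python
def format_authors(authors, max_authors=3):
    """Illustrative only: cap how many authors appear in a one-line listing."""
    if len(authors) <= max_authors:
        return ', '.join(authors)
    return ', '.join(authors[:max_authors]) + ' et al.'

print(format_authors(['Page', 'Brin', 'Motwani', 'Winograd'], max_authors=2))
# -> Page, Brin et al.
```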

View File

@@ -144,7 +144,7 @@ def setup(conf, force_colors=False):

 # undye
-undye_re = re.compile('\x1b\[[;\d]*[A-Za-z]')
+undye_re = re.compile('\x1b\\[[;\d]*[A-Za-z]')

 def undye(s):
     """Purge string s of color"""

View File

@@ -30,7 +30,7 @@ def command(conf, args):

     if force is None:
         to_remove_str = '\n'.join(pretty.paper_oneliner(rp.pull_paper(key),
-                                                        n_authors=conf['main']['n_authors'])
+                                                        max_authors=conf['main']['max_authors'])
                                   for key in keys)
         are_you_sure = (("Are you sure you want to delete the following publication{}"
                          " (this will also delete associated documents)?:\n{}\n")

View File

@@ -16,7 +16,7 @@ def filter_filename(filename, ext):
     """ Return the filename without the extension if the extension matches ext.
     Otherwise return None
     """
-    pattern = '.*\{}$'.format(ext)
+    pattern = '.*\\{}$'.format(ext)
     if re.match(pattern, filename) is not None:
         return u_maybe(filename[:-len(ext)])
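
Doubling the backslash keeps the built pattern unchanged while avoiding the invalid-escape DeprecationWarning. An alternative sketch of the same extension check that escapes the whole extension with `re.escape()` (names are illustrative, not the pubs function):

```python
import re

def strip_extension(filename, ext):
    """Return filename without ext if it ends with ext, otherwise None."""
    if re.match('.*{}$'.format(re.escape(ext)), filename):
        return filename[:-len(ext)]
    return None

assert strip_extension('Page99.pdf', '.pdf') == 'Page99'
assert strip_extension('Page99.pdf', '.bib') is None
```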

View File

@@ -1,7 +1,7 @@
 from __future__ import unicode_literals

-bibtex_external0 = """
+bibtex_external0 = r"""
 @techreport{Page99,
 number = {1999-66},
 month = {November},
@@ -17,7 +17,7 @@ institution = {Stanford InfoLab},
 }
 """

-bibtex_external_alt = """
+bibtex_external_alt = r"""
 @techreport{Page99,
 number = {1999-66},
 month = {November},
@@ -33,7 +33,7 @@ institution = {Stanford InfoLab},
 }
 """

-bibtex_raw0 = """@techreport{
+bibtex_raw0 = r"""@techreport{
 Page99,
 author = "Page, Lawrence and Brin, Sergey and Motwani, Rajeev and Winograd, Terry",
 publisher = "Stanford InfoLab",
@@ -50,12 +50,12 @@ bibtex_raw0 = """@techreport{
 """

-metadata_raw0 = """docfile: docsdir://Page99.pdf
+metadata_raw0 = r"""docfile: docsdir://Page99.pdf
 tags: [search, network]
 added: '2013-11-14 13:14:20'
 """

-turing_bib = """@article{turing1950computing,
+turing_bib = r"""@article{turing1950computing,
 title={Computing machinery and intelligence},
 author={Turing, Alan M},
 journal={Mind},
@@ -75,7 +75,7 @@ added: '2013-11-14 13:14:20'
 """

 # Should not parse (see #113)
-bibtex_no_citekey = """@Manual{,
+bibtex_no_citekey = r"""@Manual{,
 title = {R: A Language and Environment for Statistical Computing},
 author = {{R Core Team}},
 organization = {R Foundation for Statistical Computing},
@@ -85,7 +85,7 @@ bibtex_no_citekey = """@Manual{,
 }
 """

-bibtex_month = """@inproceedings{Goyal2017,
+bibtex_month = r"""@inproceedings{Goyal2017,
 author = {Goyal, Anirudh and Sordoni, Alessandro and C{\^{o}}t{\'{e}}, Marc-Alexandre and Ke, Nan Rosemary and Bengio, Yoshua},
 title = {Z-Forcing: Training Stochastic Recurrent Networks},
 year = {2017},
@@ -94,15 +94,15 @@ bibtex_month = """@inproceedings{Goyal2017,
 }
 """

-not_bibtex = """@misc{this looks,
+not_bibtex = r"""@misc{this looks,
 like = a = bibtex file but
 , is not a real one!
 """

-bibtex_with_latex = """@article{kjaer2018large,
+bibtex_with_latex = r"""@article{kjaer2018large,
 title={A large impact crater beneath Hiawatha Glacier in northwest Greenland},
-author={Kj{\\ae}r, Kurt H and Larsen, Nicolaj K and Binder, Tobias and Bj{\\o}rk, Anders A and Eisen, Olaf and Fahnestock, Mark A and Funder, Svend and Garde, Adam A and Haack, Henning and Helm, Veit and others},
+author={Kj{\ae}r, Kurt H and Larsen, Nicolaj K and Binder, Tobias and Bj{\o}rk, Anders A and Eisen, Olaf and Fahnestock, Mark A and Funder, Svend and Garde, Adam A and Haack, Henning and Helm, Veit and others},
 journal={Science advances},
 volume={4},
 number={11},
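
All of these fixture strings gain an `r` prefix for the same reason: they contain LaTeX backslash commands. Sequences like `\~` are invalid escapes that warn on Python 3.6+, and `\a` in `{\ae}` would otherwise be interpreted silently as the bell character; a raw string keeps every backslash as written. A minimal illustration:

```python
plain = 'Ni{\\~n}o'   # regular string: the backslash must be doubled
raw = r'Ni{\~n}o'     # raw string: the backslash is kept as written
assert plain == raw
```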

View File

@@ -200,8 +200,8 @@ class TestFilterPaper(unittest.TestCase):

     def test_latex_enc(self):
         latexenc_paper = doe_paper.deepcopy()
-        latexenc_paper.bibentry['Doe2013']['title'] = "{E}l Ni{\~n}o"
-        latexenc_paper.bibentry['Doe2013']['author'][0] = "Erd\H{o}s, Paul"
+        latexenc_paper.bibentry['Doe2013']['title'] = r"{E}l Ni{\~n}o"
+        latexenc_paper.bibentry['Doe2013']['author'][0] = r"Erd\H{o}s, Paul"
         self.assertTrue(get_paper_filter(['title:El'])(latexenc_paper))
         self.assertTrue(get_paper_filter(['title:Niño'])(latexenc_paper))
         self.assertTrue(get_paper_filter(['author:erdős'])(latexenc_paper))
@@ -209,12 +209,12 @@ class TestFilterPaper(unittest.TestCase):

     def test_normalize_unicode(self):
         latexenc_paper = doe_paper.deepcopy()
-        latexenc_paper.bibentry['Doe2013']['title'] = "{E}l Ni{\~n}o"
+        latexenc_paper.bibentry['Doe2013']['title'] = r"{E}l Ni{\~n}o"
         self.assertTrue(get_paper_filter(['title:Nin\u0303o'])(latexenc_paper))

     def test_strict(self):
         latexenc_paper = doe_paper.deepcopy()
-        latexenc_paper.bibentry['Doe2013']['title'] = "El Ni{\~n}o"
+        latexenc_paper.bibentry['Doe2013']['title'] = r"El Ni{\~n}o"
         self.assertFalse(get_paper_filter(
             ['title:Nin\u0303o'], strict=True)(latexenc_paper))
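
Background for the strings used in `test_normalize_unicode` and `test_strict`: `'Nin\u0303o'` spells the ñ as `n` plus a combining tilde, while `'Niño'` uses the precomposed code point, and NFC normalization maps one onto the other. A quick standalone check:

```python
import unicodedata

decomposed = 'Nin\u0303o'   # 'n' followed by U+0303 COMBINING TILDE
composed = 'Niño'           # precomposed U+00F1
assert decomposed != composed                                # raw code points differ
assert unicodedata.normalize('NFC', decomposed) == composed  # equal after normalization
```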