handle / in citekeys
parent 3c6d547a91
commit b99c5b43fa
@@ -15,7 +15,7 @@ class ReferenceNotFoundError(Exception):
     pass


-def get_bibentry_from_api(id_str, id_type, try_doi=True, ui=None):
+def get_bibentry_from_api(id_str, id_type, try_doi=True, ui=None, raw=False):
     """Return a bibtex string from various ID methods.

     This is a wrapper around functions that will return a bibtex string given
@@ -50,6 +50,9 @@ def get_bibentry_from_api(id_str, id_type, try_doi=True, ui=None):
         raise ValueError('id_type must be one of `doi`, `isbn`, or `arxiv`.')

     bibentry_raw = id_fns[id_type](id_str, try_doi=try_doi, ui=ui)
+    if raw:
+        return bibentry_raw
+
     bibentry = endecoder.EnDecoder().decode_bibdata(bibentry_raw)
     if bibentry is None:
         raise ReferenceNotFoundError(
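For orientation, a minimal usage sketch of the new `raw` flag (illustration only, not part of the commit; the DOI is the one used in the tests further down, and the calls hit the network unless `requests` is mocked):

```
from pubs import apis

# raw=True returns the bibtex string exactly as fetched from the API,
# skipping the endecoder round-trip.
raw_bibtex = apis.get_bibentry_from_api('10.1007/s00422-012-0514-6', 'doi', raw=True)

# raw=False (the default) returns the decoded bib entry, as before.
bibentry = apis.get_bibentry_from_api('10.1007/s00422-012-0514-6', 'doi')
```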
@@ -51,7 +51,7 @@ def author_last(author_str):
     return author_str.split(',')[0]


-def generate_citekey(bibdata):
+def generate_citekey(bibdata, generate=True):
     """ Generate a citekey from bib_data.

     :param generate: if False, return the citekey defined in the file,
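A hedged sketch of the two modes of the new `generate` flag, inferred from the signature and the docstring above (the inline bibtex string and the expected keys are purely illustrative):

```
from pubs import bibstruct, endecoder

bibdata = endecoder.EnDecoder().decode_bibdata(
    '@article{my_key, author={Doe, Jane}, title={A Title}, year={2013}}')

# Default: derive a key from the entry's author/year, e.g. 'Doe2013'.
print(bibstruct.generate_citekey(bibdata))

# generate=False: prefer the citekey already defined in the entry ('my_key').
print(bibstruct.generate_citekey(bibdata, generate=False))
```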
@@ -18,3 +18,4 @@ from . import import_cmd
 # bonus
 from . import websearch_cmd
+from . import url_cmd
 #from . import bibtex_cmd
@@ -72,6 +72,22 @@ def bibentry_from_editor(conf, ui):
     return bibentry


+def bibentry_from_api(args, ui, raw=False):
+    try:
+        if args.doi is not None:
+            return apis.get_bibentry_from_api(args.doi, 'doi', ui=ui, raw=raw)
+        elif args.isbn is not None:
+            return apis.get_bibentry_from_api(args.isbn, 'isbn', ui=ui, raw=raw)
+            # TODO distinguish between cases, offer to open the error page in a webbrowser.
+            # TODO offer to confirm/change citekey
+        elif args.arxiv is not None:
+            return apis.get_bibentry_from_api(args.arxiv, 'arxiv', ui=ui, raw=raw)
+    except apis.ReferenceNotFoundError as e:
+        ui.error(str(e))
+        ui.exit(1)
+
+
+
 def command(conf, args):
     """
     :param bibfile: bibtex file (in .bib, .bibml or .yaml format.
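A small usage sketch of the new helper (illustrative only; `args` mimics the argparse namespace that the pubs CLI builds, and `ui` stands for the user-interface object already in scope inside `command`):

```
import argparse

# Exactly one of doi/isbn/arxiv is expected to be set,
# mirroring `pubs add -D`, `-I` and `-X`.
args = argparse.Namespace(doi='10.1007/s00422-012-0514-6', isbn=None, arxiv=None)

# Inside command(), the lookup then reduces to:
#     bibentry = bibentry_from_api(args, ui)              # decoded entry
#     bibtex_str = bibentry_from_api(args, ui, raw=True)  # raw bibtex string
```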
@@ -92,19 +108,7 @@ def command(conf, args):
         if args.doi is None and args.isbn is None and args.arxiv is None:
             bibentry = bibentry_from_editor(conf, ui)
         else:
-            bibentry = None
-            try:
-                if args.doi is not None:
-                    bibentry = apis.get_bibentry_from_api(args.doi, 'doi', ui=ui)
-                elif args.isbn is not None:
-                    bibentry = apis.get_bibentry_from_api(args.isbn, 'isbn', ui=ui)
-                    # TODO distinguish between cases, offer to open the error page in a webbrowser.
-                    # TODO offer to confirm/change citekey
-                elif args.arxiv is not None:
-                    bibentry = apis.get_bibentry_from_api(args.arxiv, 'arxiv', ui=ui)
-            except apis.ReferenceNotFoundError as e:
-                ui.error(str(e))
-                ui.exit(1)
+            bibentry = bibentry_from_api(args, ui)
     else:
         bibentry_raw = content.get_content(bibfile, ui=ui)
         bibentry = decoder.decode_bibdata(bibentry_raw)
@@ -116,7 +120,7 @@ def command(conf, args):
     citekey = args.citekey
     if citekey is None:
         base_key = bibstruct.extract_citekey(bibentry)
-        citekey = rp.unique_citekey(base_key)
+        citekey = rp.unique_citekey(base_key, bibentry)
     elif citekey in rp:
         ui.error('citekey already exist {}.'.format(citekey))
         ui.exit(1)
@@ -14,26 +14,28 @@ from ..content import system_path, read_text_file
 from ..command_utils import add_doc_copy_arguments


-_ABORT_USE_IGNORE_MSG = "Aborting import. Use --ignore-malformed to ignore."
+_ABORT_USE_IGNORE_MSG = " Aborting import. Use --ignore-malformed to ignore."
 _IGNORING_MSG = " Ignoring it."


 def parser(subparsers, conf):
     parser = subparsers.add_parser(
         'import',
-        help='import paper(s) to the repository')
+        help='import paper(s) to the repository.')
     parser.add_argument(
         'bibpath',
-        help='path to bibtex, bibtexml or bibyaml file (or directory)')
+        help=("path to bibtex, bibtexml or bibyaml file, or a directory "
+              "containing such files; will not recurse into subdirectories."))
     parser.add_argument(
         'keys', nargs='*',
-        help="one or several keys to import from the file")
+        help=("one or several keys to import from the file; if not provided,"
+              " all entries will be imported."))
     parser.add_argument(
         '-O', '--overwrite', action='store_true', default=False,
-        help="Overwrite keys already in the database")
+        help="overwrite keys already in the database.")
     parser.add_argument(
         '-i', '--ignore-malformed', action='store_true', default=False,
-        help="Ignore malformed and unreadable files and entries")
+        help="ignore malformed and unreadable files and entries.")
     add_doc_copy_arguments(parser, copy=False)
     return parser

@@ -87,10 +87,14 @@ else:
             super(StdIO, self).__init__(*args, **kwargs)

         def write(self, s):
-            super(StdIO, self).write(s)
             if self.additional_out is not None:
+                try:
+                    s = s.decode()
+                except AttributeError:
+                    pass
                 self.additional_out.write(s)
+
+            super(StdIO, self).write(s)

 # Only for tests to capture std{out,err}
 def _fake_stdio(additional_out=False):
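The added try/except simply normalizes whatever is written, bytes under Python 2 or text under Python 3, before forwarding it to the additional stream; a standalone sketch of the same pattern (the helper name is hypothetical):

```
def _to_text(s):
    """Return `s` as text, decoding it first if it is a bytes object."""
    try:
        return s.decode()
    except AttributeError:  # already a text string
        return s
```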
@@ -32,6 +32,7 @@ CORE_CMDS = collections.OrderedDict([

     ('websearch', commands.websearch_cmd),
+    ('url', commands.url_cmd),
     #('bibtex', commands.bibtex_cmd),
 ])

pubs/repo.py | 12
@@ -192,8 +192,16 @@ class Repository(object):
             p.docpath = docfile
         self.push_paper(p, overwrite=True, event=False)

-    def unique_citekey(self, base_key):
-        """Create a unique citekey for a given basekey."""
+    def unique_citekey(self, base_key, bibentry):
+        """Create a unique citekey for a given base key.
+
+        :param base_key: the base key in question.
+        :param bibentry: the bib entry to possibly generate the citekey.
+        """
+        # can't have `/` in citekeys
+        # FIXME: a bit crude, but efficient for now (and allows unicode citekeys)
+        if '/' in base_key:
+            base_key = bibstruct.generate_citekey(bibentry)
         for n in itertools.count():
             if not base_key + _base27(n) in self.citekeys:
                 return base_key + _base27(n)
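This is the heart of the commit: a base key containing `/` is discarded and a key is regenerated from the bib entry instead, presumably because citekeys end up in on-disk file names. A small, self-contained illustration of why a `/` is problematic (paths shown for POSIX):

```
import os

citekey = 'bla/bla'  # e.g. a key derived from a DOI
print(os.path.join('bib', citekey + '.bib'))
# -> 'bib/bla/bla.bib': the key introduces an unintended sub-directory
#    instead of naming a single flat file.
```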
@@ -86,7 +86,7 @@ and then add `\cite{Loeb_2012}` in your manuscript. After exporting the bibliogr

 You can attach a document to a reference:
 ```
-pubs add Loeb2012_downloaded.pdf Loeb_2012
+pubs doc add Loeb2012_downloaded.pdf Loeb_2012
 ```

 And open your documents automatically from the command line:
tests/data/collection.bib | 41 (new file)
@@ -0,0 +1,41 @@
+@article{Einstein_1935,
+    doi = {10.1103/physrev.47.777},
+    url = {https://doi.org/10.1103%2Fphysrev.47.777},
+    year = 1935,
+    month = {may},
+    publisher = {American Physical Society ({APS})},
+    volume = {47},
+    number = {10},
+    pages = {777--780},
+    author = {A. Einstein and B. Podolsky and N. Rosen},
+    title = {Can Quantum-Mechanical Description of Physical Reality Be Considered Complete?},
+    journal = {Physical Review}
+}
+
+@article{Schrodinger_1935,
+    doi = {10.1017/s0305004100013554},
+    url = {https://doi.org/10.1017%2Fs0305004100013554},
+    year = 1935,
+    month = {oct},
+    publisher = {Cambridge University Press ({CUP})},
+    volume = {31},
+    number = {04},
+    pages = {555},
+    author = {E. Schrödinger and M. Born},
+    title = {Discussion of Probability Relations between Separated Systems},
+    journal = {Mathematical Proceedings of the Cambridge Philosophical Society}
+}
+
+@article{Bell_1964,
+    doi = {10.1103/physicsphysiquefizika.1.195},
+    url = {https://doi.org/10.1103%2Fphysicsphysiquefizika.1.195},
+    year = 1964,
+    month = {nov},
+    publisher = {American Physical Society ({APS})},
+    volume = {1},
+    number = {3},
+    pages = {195--200},
+    author = {J. S. Bell},
+    title = {On the Einstein Podolsky Rosen paradox},
+    journal = {Physics Physique {\cyrchar\cyrf}{\cyrchar\cyri}{\cyrchar\cyrz}{\cyrchar\cyri}{\cyrchar\cyrk}{\cyrchar\cyra}}
+}
@@ -1,7 +1,11 @@
 1. Install the dependencies using:
-> pip install -r requirements.txt
+```
+pip install -r ../dev_requirements.txt
+```

 2. Run the tests using:
-> python -m unittest discover
+```
+python setup.py test
+```

 If you use nosetest, it will complain about addExpectedFailure, which you can safely disregard.
@@ -16,8 +16,12 @@ import mock_requests


 class APITests(unittest.TestCase):
-    pass
+
+    @mock.patch('pubs.apis.requests.get', side_effect=mock_requests.mock_requests_get)
+    def test_readme(self, reqget):
+        apis.doi2bibtex('10.1007/s00422-012-0514-6')
+        apis.isbn2bibtex('978-0822324669')
+        apis.arxiv2bibtex('math/9501234')


 class TestDOI2Bibtex(APITests):
File diff suppressed because one or more lines are too long
@@ -29,10 +29,12 @@ class TestCitekeyGeneration(TestRepo):

     def test_generated_key_is_unique(self):
         self.repo.push_paper(Paper.from_bibentry(fixtures.doe_bibentry))
-        c = self.repo.unique_citekey('Doe2013')
+        c = self.repo.unique_citekey('Doe2013', fixtures.doe_bibentry)
         self.repo.push_paper(Paper.from_bibentry(fixtures.doe_bibentry,
                                                  citekey='Doe2013a'))
-        c = self.repo.unique_citekey('Doe2013')
+        c = self.repo.unique_citekey('Doe2013', fixtures.doe_bibentry)
         self.assertEqual(c, 'Doe2013b')
+        c = self.repo.unique_citekey('bla/bla', fixtures.doe_bibentry)
+        self.assertEqual(c, 'Doe2013b')

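The new assertions exercise both the suffixing scheme and the `/` fallback: with 'Doe2013' and 'Doe2013a' already taken, a plain 'Doe2013' request and a rejected 'bla/bla' key (regenerated from the Doe entry) both resolve to 'Doe2013b'. A simplified, self-contained sketch of the suffix search (assuming `_base27` yields '', 'a', 'b', … for 0, 1, 2, …):

```
def first_free(base_key, taken):
    # Try base_key, then base_key + 'a', 'b', ... until an unused key is found.
    for suffix in [''] + [chr(ord('a') + i) for i in range(26)]:
        if base_key + suffix not in taken:
            return base_key + suffix

print(first_free('Doe2013', {'Doe2013', 'Doe2013a'}))  # -> 'Doe2013b'
```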
@@ -10,10 +10,13 @@ import mock

 import six
 import ddt
+import certifi
+import mock
 from pyfakefs.fake_filesystem import FakeFileOpen

 import dotdot
 import fake_env
+import mock_requests

 from pubs import pubs_cmd, color, content, uis, p3, endecoder
 from pubs.config import conf
@@ -153,9 +156,10 @@ class DataCommandTestCase(CommandTestCase):
         super(DataCommandTestCase, self).setUp(nsec_stat=nsec_stat)
         self.fs.add_real_directory(os.path.join(self.rootpath, 'data'), read_only=False)
         self.fs.add_real_directory(os.path.join(self.rootpath, 'bibexamples'), read_only=False)
+        # add certificate for web querries
+        self.fs.add_real_file(certifi.where(), read_only=True)
+        self.fs.add_real_file(mock_requests._data_filepath, read_only=False)

-        # fake_env.copy_dir(self.fs, os.path.join(os.path.dirname(__file__), 'data'), 'data')
-        # fake_env.copy_dir(self.fs, os.path.join(os.path.dirname(__file__), 'bibexamples'), 'bibexamples')

     def assertFileContentEqual(self, path, expected_content):
         self.assertTrue(os.path.isfile(path))
@@ -849,7 +853,7 @@ class TestUsecase(DataCommandTestCase):
                 ]

         outs = self.execute_cmds(cmds)
-        self.assertEqual(4 + 1, len(outs[-1].split('\n')))
+        self.assertEqual(8, len(outs[-1].split('\n')))

     def test_import_one(self):
         cmds = ['pubs init',
@@ -1002,10 +1006,10 @@ class TestUsecase(DataCommandTestCase):
         self.assertEqual(lines[2], 'Total tags: 3, 2 (50%) of papers have at least one tag')

     def test_add_no_extension(self):
-        # This tests checks that a paper which document has no
-        # extension does not raise issues when listing. This test might
-        # be removed if decided to prevent such documents. It would then need
-        # to be replaced by a check that this is prevented.
+        """This tests checks that a paper which document has no extension does
+        not raise issues when listing. This test might be removed if decided to
+        prevent such documents. It would then need to be replaced by a check
+        that this is prevented."""
         self.fs.add_real_file(os.path.join(self.rootpath, 'data', 'pagerank.pdf'),
                               target_path=os.path.join('data', 'no-ext'))
         correct = ['Initializing pubs in /pubs\n',
@@ -1019,6 +1023,26 @@ class TestUsecase(DataCommandTestCase):
                   ]
         self.assertEqual(correct, self.execute_cmds(cmds, capture_output=True))

+    @mock.patch('pubs.apis.requests.get', side_effect=mock_requests.mock_requests_get)
+    def test_readme(self, reqget):
+        """Test that the readme example work."""
+        self.fs.add_real_file(os.path.join(self.rootpath, 'data/pagerank.pdf'), target_path='data/Loeb_2012.pdf')
+        self.fs.add_real_file(os.path.join(self.rootpath, 'data/pagerank.pdf'), target_path='data/oyama2000the.pdf')
+        self.fs.add_real_file(os.path.join(self.rootpath, 'data/pagerank.pdf'), target_path='data/Knuth1995.pdf')
+
+        cmds = ['pubs init',
+                'pubs import data/collection.bib',
+                'pubs add data/pagerank.bib -d data/pagerank.pdf',
+                #'pubs add -D 10.1007/s00422-012-0514-6 -d data/pagerank.pdf',
+                'pubs add -I 978-0822324669 -d data/oyama2000the.pdf',
+                'pubs add -X math/9501234 -d data/Knuth1995.pdf',
+                'pubs add -D 10.1007/s00422-012-0514-6',
+                'pubs doc add data/Loeb_2012.pdf Loeb_2012',
+                ]
+        self.execute_cmds(cmds, capture_output=True)
+        # self.assertEqual(correct, self.execute_cmds(cmds, capture_output=True))
+
+

 @ddt.ddt
 class TestCache(DataCommandTestCase):