Merge pubs updates
commit f7cfe22bf6
@@ -7,6 +7,8 @@
 
 ### Implemented enhancements
 
+- Possibility to exclude bibtex fields when adding references ([#273](https://github.com/pubs/pubs/pull/273) by [Mitsuhiro Nakamura](https://github.com/m15a))
+- Less ambiguous tag display for tags with space ([#265](https://github.com/pubs/pubs/pull/265) by [Florian Richoux](https://github.com/richoux))
 - Migration from Travis CI to Github actions ([#260](https://github.com/pubs/pubs/pull/260))
 - Allow passing named arguments to custom commands ([#241](https://github.com/pubs/pubs/pull/241) by [jkulhanek](https://github.com/jkulhanek))
 - Added support for non-standard bibtex types, e.g. @collection, @software, etc. ([#226](https://github.com/pubs/pubs/pull/226))
@@ -116,6 +116,9 @@ def command(conf, args):
         if bibentry is None:
             ui.error('invalid bibfile {}.'.format(bibfile))
 
+    # exclude bibtex fields if specified
+    utils.remove_bibtex_fields(bibentry, conf['main']['exclude_bibtex_fields'])
+
     # citekey
 
     citekey = args.citekey
@@ -6,7 +6,7 @@ from .. import color
 
 from ..uis import get_ui
 from ..endecoder import EnDecoder
-from ..utils import resolve_citekey
+from ..utils import resolve_citekey, remove_bibtex_fields
 from ..completion import CiteKeyCompletion
 from ..events import ModifyEvent
 
@@ -59,6 +59,9 @@ def command(conf, args):
             ui.info(("The metadata of paper '{}' was successfully "
                      "edited.".format(color.dye_out(citekey, 'citekey'))))
         else:
+            # exclude bibtex fields if specified
+            remove_bibtex_fields(content, conf['main']['exclude_bibtex_fields'])
+
             new_paper = Paper.from_bibentry(content,
                                             metadata=paper.metadata)
             if rp.rename_paper(new_paper, old_citekey=paper.citekey):
@@ -5,7 +5,7 @@ import argparse
 from .. import repo
 from ..uis import get_ui
 from .. import endecoder
-from ..utils import resolve_citekey_list
+from ..utils import resolve_citekey_list, remove_bibtex_fields
 from ..endecoder import BIBFIELD_ORDER
 from ..completion import CiteKeyCompletion, CommaSeparatedListCompletion
 
@@ -53,6 +53,9 @@ def command(conf, args):
     for p in papers:
         bib[p.citekey] = p.bibdata
 
+    # exclude bibtex fields if specified
+    remove_bibtex_fields(bib, conf['main']['exclude_bibtex_fields'])
+
     exporter = endecoder.EnDecoder()
     bibdata_raw = exporter.encode_bibdata(bib, args.ignore_fields)
     ui.message(bibdata_raw)
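
Note: two filters can now apply on export. The config-level exclusion strips fields from the entries themselves before encoding, while the pre-existing `args.ignore_fields` is handled at encoding time by `encode_bibdata` (as the export test further below does with `ignore_fields=['author', 'title']`). A minimal sketch with illustrative citekey and field values, assuming a pubs checkout that includes this commit:

    from pubs.utils import remove_bibtex_fields  # helper added by this commit

    bib = {'Page99': {'title': 'PageRank', 'url': 'http://example.org', 'note': 'draft'}}
    remove_bibtex_fields(bib, ['url'])   # stands in for conf['main']['exclude_bibtex_fields']
    assert 'url' not in bib['Page99']
    # args.ignore_fields (e.g. ['note']) is not removed here; it is dropped later,
    # at encoding time, by exporter.encode_bibdata(bib, args.ignore_fields).
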
@@ -11,6 +11,7 @@ from .. import content
 from ..paper import Paper
 from ..uis import get_ui
 from ..content import system_path, read_text_file
+from ..utils import remove_bibtex_fields
 from ..command_utils import add_doc_copy_arguments
 
 
@@ -21,7 +22,7 @@ _IGNORING_MSG = " Ignoring it."
 def parser(subparsers, conf):
     parser = subparsers.add_parser(
         'import',
-        help='import paper(s) to the repository.')
+        help='import paper(s) to the repository')
     parser.add_argument(
         'bibpath',
         help=("path to bibtex, bibtexml or bibyaml file, or a directory "
@@ -40,7 +41,7 @@ def parser(subparsers, conf):
     return parser
 
 
-def many_from_path(ui, bibpath, ignore=False):
+def many_from_path(ui, bibpath, exclude_bibtex_fields=[], ignore=False):
     """Extract list of papers found in bibliographic files in path.
 
     The behavior is to:
@@ -62,7 +63,10 @@ def many_from_path(ui, bibpath, ignore=False):
     biblist = []
     for filepath in all_files:
         try:
-            biblist.append(coder.decode_bibdata(read_text_file(filepath)))
+            bibentry = coder.decode_bibdata(read_text_file(filepath))
+            # exclude bibtex fields if specified
+            remove_bibtex_fields(bibentry, exclude_bibtex_fields)
+            biblist.append(bibentry)
         except coder.BibDecodingError:
             error = "Could not parse bibtex at {}.".format(filepath)
             if ignore:
@@ -100,7 +104,9 @@ def command(conf, args):
 
     rp = repo.Repository(conf)
     # Extract papers from bib
-    papers = many_from_path(ui, bibpath, ignore=args.ignore_malformed)
+    papers = many_from_path(ui, bibpath,
+                            exclude_bibtex_fields=conf['main']['exclude_bibtex_fields'],
+                            ignore=args.ignore_malformed)
     keys = args.keys or papers.keys()
     for k in keys:
         p = papers[k]
@@ -6,7 +6,7 @@ from .. import color
 def parser(subparsers, conf):
     parser = subparsers.add_parser(
         'statistics',
-        help="show statistics on the repository.")
+        help="show statistics on the repository")
     return parser
 
 
@@ -87,7 +87,7 @@ def command(conf, args):
     rp = Repository(conf)
 
     if citekeyOrTag is None:
-        ui.message(color.dye_out(' '.join(sorted(rp.get_tags())), 'tag'))
+        ui.message(color.dye_out(', '.join(sorted(rp.get_tags())), 'tag'))
     else:
         not_citekey = False
         try:
@@ -97,7 +97,7 @@ def command(conf, args):
         if not not_citekey:
             p = rp.pull_paper(citekeyOrTag)
             if tags is None:
-                ui.message(color.dye_out(' '.join(sorted(p.tags)), 'tag'))
+                ui.message(color.dye_out(', '.join(sorted(p.tags)), 'tag'))
             else:
                 add_tags, remove_tags = _tag_groups(_parse_tag_seq(tags))
                 for tag in add_tags:
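
Note: the comma separator is what makes the tag list unambiguous when a tag itself contains a space (the #265 changelog entry above). A minimal sketch, with made-up tags:

    tags = {'machine learning', 'ai'}
    print(' '.join(sorted(tags)))    # ai machine learning  -- two tags or three?
    print(', '.join(sorted(tags)))   # ai, machine learning -- clearly two
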
@@ -56,6 +56,11 @@ normalize_citekey = boolean(default=False)
 #
 citekey_format = string(default='{{author_last_name:l}}{{year}}{{short_title:l}}')
 
+# which bibliographic fields to exclude from bibtex files. By default, none.
+# Please note that excluding critical fields such as `title` or `author`
+# will break many commands of pubs.
+exclude_bibtex_fields = force_list(default=list())
+
 [formating]
 
 # Enable bold formatting, if the terminal supports it.
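
Note: since the option is declared with `force_list`, a comma-separated value in the configuration file is read back as a Python list, and leaving it unset yields an empty list. An illustrative entry (field names made up) in the `[main]` section:

    [main]
    exclude_bibtex_fields = abstract, note
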
@@ -78,7 +78,7 @@ def paper_oneliner(p, citekey_only=False, max_authors=3):
             else 'NOEXT'),
             'tag')
     tags = '' if len(p.tags) == 0 else '| {}'.format(
-        ','.join(color.dye_out(t, 'tag') for t in sorted(p.tags)))
+        ', '.join(color.dye_out(t, 'tag') for t in sorted(p.tags)))
     return '[{citekey}] {descr}{doc} {tags}'.format(
         citekey=color.dye_out(p.citekey, 'citekey'),
         descr=bibdesc, tags=tags, doc=doc_str)
@@ -87,3 +87,9 @@ def standardize_doi(doi):
         new_doi = match.group(0)
 
     return new_doi
+
+def remove_bibtex_fields(bibentry, fields):
+    for item in bibentry.values():
+        for field in fields:
+            if field in item:
+                del item[field]
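
Note: the helper mutates the entry dict in place and expects the citekey-to-fields mapping that the tests below also iterate over; fields not present in an entry are simply skipped. A minimal usage sketch with illustrative values:

    bibentry = {'Page99': {'title': 'The PageRank Citation Ranking', 'abstract': '...'}}
    remove_bibtex_fields(bibentry, ['abstract', 'publisher'])  # absent 'publisher' is ignored
    assert bibentry == {'Page99': {'title': 'The PageRank Citation Ranking'}}
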
@@ -187,3 +187,5 @@ You can access the self-documented configuration by using `pubs conf`, and all t
 - [Jonáš Kulhánek](https://github.com/jkulhanek)
 - [Dominik Stańczak](https://github.com/StanczakDominik)
 - [Gustavo José de Sousa](https://github.com/guludo)
+- [Florian Richoux](https://github.com/richoux)
+- [Mitsuhiro Nakamura](https://github.com/m15a)
@@ -138,6 +138,9 @@ edit_cmd = "vim"
 # the full python stack is printed.
 debug = False
 
+# which bibliographic fields to exclude from bibtex files.
+exclude_bibtex_fields =
+
 [formating]
 
 # Enable bold formatting, if the terminal supports it.
@@ -437,6 +437,18 @@ class TestAdd(URLContentTestCase):
             self.execute_cmds(cmds)
         self.assertEqual(cm.exception.code, 1)
 
+    def test_add_excludes_bibtex_fields(self):
+        self.execute_cmds(['pubs init'])
+        config = conf.load_conf()
+        config['main']['exclude_bibtex_fields'] = ['abstract', 'publisher']
+        conf.save_conf(config)
+        self.execute_cmds(['pubs add data/pagerank.bib'])
+        with FakeFileOpen(self.fs)(self.default_pubs_dir + '/bib/Page99.bib', 'r') as buf:
+            out = endecoder.EnDecoder().decode_bibdata(buf.read())
+        for bib in out.values():
+            self.assertFalse('abstract' in bib or 'publisher' in bib)
+            self.assertTrue('title' in bib and 'author' in bib)
+
 
 class TestList(DataCommandTestCase):
 
@@ -590,7 +602,7 @@ class TestTag(DataCommandTestCase):
                'pubs list',
                ]
        correct = ['',
-                   '[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) | network,search\n' +
+                   '[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) | network, search\n' +
                   '[Turing1950] Turing, Alan M "Computing machinery and intelligence" Mind (1950) \n',
                   ]
        out = self.execute_cmds(cmds)
@@ -738,9 +750,9 @@ class TestUsecase(DataCommandTestCase):
                   '[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) [pdf] \n',
                   '\n',
                   '',
-                   'network search\n',
+                   'network, search\n',
                   'info: Assuming search to be a tag.\n'
-                   '[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) [pdf] | network,search\n',
+                   '[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) [pdf] | network, search\n',
                   ]
 
        cmds = ['pubs init -p /paper_first',
@@ -785,7 +797,7 @@ class TestUsecase(DataCommandTestCase):
                   '',
                   '',
                   '',
-                   'search network\n',
+                   'search, network\n',
                   ]
 
        cmds = ['pubs init -p paper_first/',
@@ -798,7 +810,7 @@ class TestUsecase(DataCommandTestCase):
        out = self.execute_cmds(cmds)
 
        def clean(s):
-            return set(s.strip().split(' '))
+            return set(s.strip().split(', '))
 
        self.assertEqual(clean(correct[2]), clean(out[2]))
        self.assertEqual(clean(correct[4]), clean(out[4]))
@@ -829,6 +841,22 @@ class TestUsecase(DataCommandTestCase):
                ]
        self.execute_cmds(cmds)
 
+    def test_editor_excludes_bibtex_field(self):
+        cmds = ['pubs init',
+                'pubs add data/pagerank.bib',
+                ]
+        self.execute_cmds(cmds)
+        config = conf.load_conf()
+        config['main']['exclude_bibtex_fields'] = ['author']
+        conf.save_conf(config)
+        cmds = [('pubs edit Page99', ['@misc{Page99, title="TTT", author="auth"}', 'n'])]
+        self.execute_cmds(cmds)
+        with FakeFileOpen(self.fs)(self.default_pubs_dir + '/bib/Page99.bib', 'r') as buf:
+            out = endecoder.EnDecoder().decode_bibdata(buf.read())
+        for bib in out.values():
+            self.assertFalse('author' in bib)
+            self.assertTrue('title' in bib)
+
     def test_add_aborts(self):
         with self.assertRaises(FakeSystemExit):
             cmds = ['pubs init',
@@ -879,7 +907,7 @@ class TestUsecase(DataCommandTestCase):
        meta = str_fixtures.turing_meta
 
        line = '[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) \n'
-        line1 = re.sub('\n', '| AI,computer\n', line)
+        line1 = re.sub('\n', '| AI, computer\n', line)
 
        cmds = ['pubs init',
                'pubs add data/pagerank.bib',
@@ -908,6 +936,19 @@ class TestUsecase(DataCommandTestCase):
            fixtures.page_bibentry, ignore_fields=['author', 'title'])
        self.assertEqual(outs[2], expected + os.linesep)
 
+    def test_export_excludes_bibtex_field(self):
+        cmds = ['pubs init',
+                'pubs add data/pagerank.bib'
+                ]
+        self.execute_cmds(cmds)
+        config = conf.load_conf()
+        config['main']['exclude_bibtex_fields'] = ['url']
+        conf.save_conf(config)
+        outs = self.execute_cmds(['pubs export Page99'])
+        for bib in endecoder.EnDecoder().decode_bibdata(outs[0]).values():
+            self.assertFalse('url' in bib)
+            self.assertTrue('title' in bib and 'author' in bib)
+
     def test_import(self):
         cmds = ['pubs init',
                 'pubs import data/',
@@ -970,6 +1011,18 @@ class TestUsecase(DataCommandTestCase):
        outs = self.execute_cmds(cmds)
        self.assertEqual(1 + 1, len(outs[-1].split('\n')))
 
+    def test_import_excludes_bibtex_field(self):
+        self.execute_cmds(['pubs init'])
+        config = conf.load_conf()
+        config['main']['exclude_bibtex_fields'] = ['abstract']
+        conf.save_conf(config)
+        self.execute_cmds(['pubs import data/ Page99'])
+        with FakeFileOpen(self.fs)(self.default_pubs_dir + '/bib/Page99.bib', 'r') as buf:
+            out = endecoder.EnDecoder().decode_bibdata(buf.read())
+        for bib in out.values():
+            self.assertFalse('abstract' in bib)
+            self.assertTrue('title' in bib and 'author' in bib)
+
     def test_update(self):
         cmds = ['pubs init',
                 'pubs add data/pagerank.bib',