Fix tests and terminology.
First, this brings a change to the paper API: the terminology of bibdata and bibentry is clarified. Now bibentry is a dictionary of the form {citekey: bibdata}, where bibdata corresponds to the actual dictionary of bibliographic fields and values {author: ..., year: ...}. bibentry is now an attribute of the paper object, generated from citekey and bibdata. This commit fixes all tests — in particular, an issue where the citekey in bibentry was not updated. It also removes prints in tests and the deprecated assertEquals. Use-case tests now fail if the command asks for unexpected inputs. Removes queries for user input in the attach and add commands (for deletion of a copied document file); that input was not coherent with the tests and was annoying.
This commit is contained in:
parent
d0a1d64720
commit
de2a50eeb2
@ -5,12 +5,12 @@ import re
|
|||||||
|
|
||||||
from .p3 import ustr, uchr
|
from .p3 import ustr, uchr
|
||||||
|
|
||||||
# citekey stuff
|
# Citekey stuff
|
||||||
|
|
||||||
TYPE_KEY = 'type'
|
TYPE_KEY = 'type'
|
||||||
|
|
||||||
CONTROL_CHARS = ''.join(map(uchr, list(range(0, 32)) + list(range(127, 160))))
|
CONTROL_CHARS = ''.join(map(uchr, list(range(0, 32)) + list(range(127, 160))))
|
||||||
CITEKEY_FORBIDDEN_CHARS = '@\'\\,#}{~%/' # '/' is OK for bibtex but forbidden
|
CITEKEY_FORBIDDEN_CHARS = '@\'\\,#}{~%/ ' # '/' is OK for bibtex but forbidden
|
||||||
# here since we transform citekeys into filenames
|
# here since we transform citekeys into filenames
|
||||||
CITEKEY_EXCLUDE_RE = re.compile('[%s]'
|
CITEKEY_EXCLUDE_RE = re.compile('[%s]'
|
||||||
% re.escape(CONTROL_CHARS + CITEKEY_FORBIDDEN_CHARS))
|
% re.escape(CONTROL_CHARS + CITEKEY_FORBIDDEN_CHARS))
|
||||||
@ -75,24 +75,22 @@ def extract_docfile(bibdata, remove=False):
|
|||||||
|
|
||||||
:param remove: remove field after extracting information (default: False)
|
:param remove: remove field after extracting information (default: False)
|
||||||
"""
|
"""
|
||||||
citekey, entry = get_entry(bibdata)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if 'file' in entry:
|
if 'file' in bibdata:
|
||||||
field = entry['file']
|
field = bibdata['file']
|
||||||
# Check if this is mendeley specific
|
# Check if this is mendeley specific
|
||||||
for f in field.split(':'):
|
for f in field.split(':'):
|
||||||
if len(f) > 0:
|
if len(f) > 0:
|
||||||
break
|
break
|
||||||
if remove:
|
if remove:
|
||||||
entry.pop('file')
|
bibdata.pop('file')
|
||||||
# This is a hck for Mendeley. Make clean
|
# This is a hck for Mendeley. Make clean
|
||||||
if f[0] != '/':
|
if f[0] != '/':
|
||||||
f = '/' + f
|
f = '/' + f
|
||||||
return f
|
return f
|
||||||
if 'attachments' in entry:
|
if 'attachments' in bibdata:
|
||||||
return entry['attachments']
|
return bibdata['attachments']
|
||||||
if 'pdf' in entry:
|
if 'pdf' in bibdata:
|
||||||
return entry['pdf']
|
return bibdata['pdf']
|
||||||
except (KeyError, IndexError):
|
except (KeyError, IndexError):
|
||||||
return None
|
return None
|
||||||
|
@ -12,7 +12,7 @@ from .. import pretty
|
|||||||
|
|
||||||
def parser(subparsers):
|
def parser(subparsers):
|
||||||
parser = subparsers.add_parser('add', help='add a paper to the repository')
|
parser = subparsers.add_parser('add', help='add a paper to the repository')
|
||||||
parser.add_argument('bibfile', nargs='?', default = None,
|
parser.add_argument('bibfile', nargs='?', default=None,
|
||||||
help='bibtex file')
|
help='bibtex file')
|
||||||
parser.add_argument('-D', '--doi', help='doi number to retrieve the bibtex entry, if it is not provided', default=None)
|
parser.add_argument('-D', '--doi', help='doi number to retrieve the bibtex entry, if it is not provided', default=None)
|
||||||
parser.add_argument('-I', '--isbn', help='isbn number to retrieve the bibtex entry, if it is not provided', default=None)
|
parser.add_argument('-I', '--isbn', help='isbn number to retrieve the bibtex entry, if it is not provided', default=None)
|
||||||
@ -26,7 +26,7 @@ def parser(subparsers):
|
|||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
def bibdata_from_editor(ui, rp):
|
def bibentry_from_editor(ui, rp):
|
||||||
again = True
|
again = True
|
||||||
bibstr = templates.add_bib
|
bibstr = templates.add_bib
|
||||||
while again:
|
while again:
|
||||||
@ -41,8 +41,8 @@ def bibdata_from_editor(ui, rp):
|
|||||||
if not again:
|
if not again:
|
||||||
ui.exit(0)
|
ui.exit(0)
|
||||||
else:
|
else:
|
||||||
bibdata = rp.databroker.verify(bibstr)
|
bibentry = rp.databroker.verify(bibstr)
|
||||||
bibstruct.verify_bibdata(bibdata)
|
bibstruct.verify_bibdata(bibentry)
|
||||||
# REFACTOR Generate citykey
|
# REFACTOR Generate citykey
|
||||||
again = False
|
again = False
|
||||||
except ValueError:
|
except ValueError:
|
||||||
@ -52,7 +52,8 @@ def bibdata_from_editor(ui, rp):
|
|||||||
if not again:
|
if not again:
|
||||||
ui.exit(0)
|
ui.exit(0)
|
||||||
|
|
||||||
return bibdata
|
return bibentry
|
||||||
|
|
||||||
|
|
||||||
def command(args):
|
def command(args):
|
||||||
"""
|
"""
|
||||||
@ -64,47 +65,47 @@ def command(args):
|
|||||||
bibfile = args.bibfile
|
bibfile = args.bibfile
|
||||||
docfile = args.docfile
|
docfile = args.docfile
|
||||||
tags = args.tags
|
tags = args.tags
|
||||||
citekey = args.copy
|
citekey = args.citekey
|
||||||
|
|
||||||
rp = repo.Repository(config())
|
rp = repo.Repository(config())
|
||||||
|
|
||||||
# get bibtex entry
|
# get bibtex entry
|
||||||
if bibfile is None:
|
if bibfile is None:
|
||||||
if args.doi is None and args.isbn is None:
|
if args.doi is None and args.isbn is None:
|
||||||
bibdata = bibdata_from_editor(ui, rp)
|
bibentry = bibentry_from_editor(ui, rp)
|
||||||
else:
|
else:
|
||||||
if args.doi is not None:
|
if args.doi is not None:
|
||||||
bibdata_raw = apis.doi2bibtex(args.doi)
|
bibentry_raw = apis.doi2bibtex(args.doi)
|
||||||
bibdata = rp.databroker.verify(bibdata_raw)
|
bibentry = rp.databroker.verify(bibentry_raw)
|
||||||
if bibdata is None:
|
if bibentry is None:
|
||||||
ui.error('invalid doi {} or unable to retrieve bibfile from it.'.format(args.doi))
|
ui.error('invalid doi {} or unable to retrieve bibfile from it.'.format(args.doi))
|
||||||
if args.isbn is None:
|
if args.isbn is None:
|
||||||
ui.exit(1)
|
ui.exit(1)
|
||||||
if args.isbn is not None:
|
if args.isbn is not None:
|
||||||
bibdata_raw = apis.isbn2bibtex(args.isbn)
|
bibentry_raw = apis.isbn2bibtex(args.isbn)
|
||||||
bibdata = rp.databroker.verify(bibdata_raw)
|
bibentry = rp.databroker.verify(bibentry_raw)
|
||||||
if bibdata is None:
|
if bibentry is None:
|
||||||
ui.error('invalid isbn {} or unable to retrieve bibfile from it.'.format(args.isbn))
|
ui.error('invalid isbn {} or unable to retrieve bibfile from it.'.format(args.isbn))
|
||||||
ui.exit(1)
|
ui.exit(1)
|
||||||
# TODO distinguish between cases, offer to open the error page in a webbrowser.
|
# TODO distinguish between cases, offer to open the error page in a webbrowser.
|
||||||
# TODO offer to confirm/change citekey
|
# TODO offer to confirm/change citekey
|
||||||
else:
|
else:
|
||||||
bibdata_raw = content.get_content(bibfile, ui=ui)
|
bibentry_raw = content.get_content(bibfile, ui=ui)
|
||||||
bibdata = rp.databroker.verify(bibdata_raw)
|
bibentry = rp.databroker.verify(bibentry_raw)
|
||||||
if bibdata is None:
|
if bibentry is None:
|
||||||
ui.error('invalid bibfile {}.'.format(bibfile))
|
ui.error('invalid bibfile {}.'.format(bibfile))
|
||||||
|
|
||||||
# citekey
|
# citekey
|
||||||
|
|
||||||
citekey = args.citekey
|
citekey = args.citekey
|
||||||
if citekey is None:
|
if citekey is None:
|
||||||
base_key = bibstruct.extract_citekey(bibdata)
|
base_key = bibstruct.extract_citekey(bibentry)
|
||||||
citekey = rp.unique_citekey(base_key)
|
citekey = rp.unique_citekey(base_key)
|
||||||
elif citekey in rp:
|
elif citekey in rp:
|
||||||
ui.error('citekey already exist {}.'.format(citekey))
|
ui.error('citekey already exist {}.'.format(citekey))
|
||||||
ui.exit(1)
|
ui.exit(1)
|
||||||
|
|
||||||
p = paper.Paper(bibdata, citekey=citekey)
|
p = paper.Paper.from_bibentry(bibentry, citekey=citekey)
|
||||||
|
|
||||||
# tags
|
# tags
|
||||||
|
|
||||||
@ -113,7 +114,7 @@ def command(args):
|
|||||||
|
|
||||||
# document file
|
# document file
|
||||||
|
|
||||||
bib_docfile = bibstruct.extract_docfile(bibdata)
|
bib_docfile = bibstruct.extract_docfile(bibentry)
|
||||||
if docfile is None:
|
if docfile is None:
|
||||||
docfile = bib_docfile
|
docfile = bib_docfile
|
||||||
elif bib_docfile is not None:
|
elif bib_docfile is not None:
|
||||||
@ -126,9 +127,6 @@ def command(args):
|
|||||||
rp.push_paper(p)
|
rp.push_paper(p)
|
||||||
if docfile is not None:
|
if docfile is not None:
|
||||||
rp.push_doc(p.citekey, docfile, copy=args.copy)
|
rp.push_doc(p.citekey, docfile, copy=args.copy)
|
||||||
if args.copy:
|
|
||||||
if ui.input_yn('{} has been copied into pubs; should the original be removed?'.format(color.dye(docfile, color.bold))):
|
|
||||||
content.remove_file(docfile)
|
|
||||||
ui.print_('{}\nwas added to pubs.'.format(pretty.paper_oneliner(p)))
|
ui.print_('{}\nwas added to pubs.'.format(pretty.paper_oneliner(p)))
|
||||||
except ValueError as v:
|
except ValueError as v:
|
||||||
ui.error(v.message)
|
ui.error(v.message)
|
||||||
|
@ -32,9 +32,6 @@ def command(args):
|
|||||||
try:
|
try:
|
||||||
document = args.document
|
document = args.document
|
||||||
rp.push_doc(paper.citekey, document, copy=args.copy)
|
rp.push_doc(paper.citekey, document, copy=args.copy)
|
||||||
if args.copy:
|
|
||||||
if ui.input_yn('{} has been copied into pubs; should the original be removed?'.format(color.dye(document, color.bold))):
|
|
||||||
content.remove_file(document)
|
|
||||||
ui.print_('{} attached to {}'.format(color.dye(document, color.bold), color.dye(paper.citekey, color.citekey)))
|
ui.print_('{} attached to {}'.format(color.dye(document, color.bold), color.dye(paper.citekey, color.citekey)))
|
||||||
|
|
||||||
except ValueError as v:
|
except ValueError as v:
|
||||||
|
@ -5,6 +5,7 @@ from ..uis import get_ui
|
|||||||
from ..endecoder import EnDecoder
|
from ..endecoder import EnDecoder
|
||||||
from ..utils import resolve_citekey
|
from ..utils import resolve_citekey
|
||||||
|
|
||||||
|
|
||||||
def parser(subparsers):
|
def parser(subparsers):
|
||||||
parser = subparsers.add_parser('edit',
|
parser = subparsers.add_parser('edit',
|
||||||
help='open the paper bibliographic file in an editor')
|
help='open the paper bibliographic file in an editor')
|
||||||
@ -34,7 +35,7 @@ def command(args):
|
|||||||
encode = coder.encode_bibdata
|
encode = coder.encode_bibdata
|
||||||
decode = coder.decode_bibdata
|
decode = coder.decode_bibdata
|
||||||
suffix = '.bib'
|
suffix = '.bib'
|
||||||
raw_content = encode(paper.bibdata)
|
raw_content = encode(paper.bibentry)
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
# Get new content from user
|
# Get new content from user
|
||||||
@ -44,10 +45,11 @@ def command(args):
|
|||||||
content = decode(raw_content)
|
content = decode(raw_content)
|
||||||
|
|
||||||
if meta:
|
if meta:
|
||||||
new_paper = Paper(paper.bibdata, citekey=paper.citekey,
|
new_paper = Paper(paper.citekey, paper.bibdata,
|
||||||
metadata=content)
|
metadata=content)
|
||||||
else:
|
else:
|
||||||
new_paper = Paper(content, metadata=paper.metadata)
|
new_paper = Paper.from_bibentry(content,
|
||||||
|
metadata=paper.metadata)
|
||||||
rp.rename_paper(new_paper, old_citekey=paper.citekey)
|
rp.rename_paper(new_paper, old_citekey=paper.citekey)
|
||||||
break
|
break
|
||||||
|
|
||||||
|
@ -33,7 +33,7 @@ def command(args):
|
|||||||
papers = rp.all_papers()
|
papers = rp.all_papers()
|
||||||
bib = {}
|
bib = {}
|
||||||
for p in papers:
|
for p in papers:
|
||||||
bib[p.citekey] = p.bibentry
|
bib[p.citekey] = p.bibdata
|
||||||
exporter = endecoder.EnDecoder()
|
exporter = endecoder.EnDecoder()
|
||||||
bibdata_raw = exporter.encode_bibdata(bib)
|
bibdata_raw = exporter.encode_bibdata(bib)
|
||||||
ui.print_(bibdata_raw)
|
ui.print_(bibdata_raw)
|
||||||
|
@ -49,12 +49,9 @@ def many_from_path(bibpath):
|
|||||||
|
|
||||||
papers = {}
|
papers = {}
|
||||||
for b in biblist:
|
for b in biblist:
|
||||||
for k in b.keys():
|
for k, b in b.items():
|
||||||
try:
|
try:
|
||||||
bibdata = {}
|
papers[k] = Paper(k, b)
|
||||||
bibdata[k] = b[k]
|
|
||||||
|
|
||||||
papers[k] = Paper(bibdata, citekey=k)
|
|
||||||
papers[k].added = datetime.datetime.now()
|
papers[k].added = datetime.datetime.now()
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
papers[k] = e
|
papers[k] = e
|
||||||
|
@ -73,10 +73,10 @@ def _lower(s, lower=True):
|
|||||||
|
|
||||||
def _check_author_match(paper, query, case_sensitive=False):
|
def _check_author_match(paper, query, case_sensitive=False):
|
||||||
"""Only checks within last names."""
|
"""Only checks within last names."""
|
||||||
if not 'author' in paper.bibentry:
|
if not 'author' in paper.bibdata:
|
||||||
return False
|
return False
|
||||||
return any([query in _lower(bibstruct.author_last(p), lower=(not case_sensitive))
|
return any([query in _lower(bibstruct.author_last(p), lower=(not case_sensitive))
|
||||||
for p in paper.bibentry['author']])
|
for p in paper.bibdata['author']])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ -86,8 +86,8 @@ def _check_tag_match(paper, query, case_sensitive=False):
|
|||||||
|
|
||||||
|
|
||||||
def _check_field_match(paper, field, query, case_sensitive=False):
|
def _check_field_match(paper, field, query, case_sensitive=False):
|
||||||
return query in _lower(paper.bibentry[field],
|
return query in _lower(paper.bibdata[field],
|
||||||
lower=(not case_sensitive))
|
lower=(not case_sensitive))
|
||||||
|
|
||||||
|
|
||||||
def _check_query_block(paper, query_block, case_sensitive=None):
|
def _check_query_block(paper, query_block, case_sensitive=None):
|
||||||
@ -100,7 +100,7 @@ def _check_query_block(paper, query_block, case_sensitive=None):
|
|||||||
return _check_tag_match(paper, value, case_sensitive=case_sensitive)
|
return _check_tag_match(paper, value, case_sensitive=case_sensitive)
|
||||||
elif field == 'author':
|
elif field == 'author':
|
||||||
return _check_author_match(paper, value, case_sensitive=case_sensitive)
|
return _check_author_match(paper, value, case_sensitive=case_sensitive)
|
||||||
elif field in paper.bibentry:
|
elif field in paper.bibdata:
|
||||||
return _check_field_match(paper, field, value,
|
return _check_field_match(paper, field, value,
|
||||||
case_sensitive=case_sensitive)
|
case_sensitive=case_sensitive)
|
||||||
else:
|
else:
|
||||||
|
@ -66,12 +66,12 @@ class Config(object):
|
|||||||
if not check_file(path, fail=False):
|
if not check_file(path, fail=False):
|
||||||
raise IOError(("The configuration file {} does not exist."
|
raise IOError(("The configuration file {} does not exist."
|
||||||
" Did you run 'pubs init' ?").format(path))
|
" Did you run 'pubs init' ?").format(path))
|
||||||
with _open(path, 'rb+') as f:
|
with _open(path, 'r+') as f:
|
||||||
_read_config(self._cfg, f)
|
_read_config(self._cfg, f)
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def save(self, path=DFT_CONFIG_PATH):
|
def save(self, path=DFT_CONFIG_PATH):
|
||||||
with _open(path, 'wb+') as f:
|
with _open(path, 'w+') as f:
|
||||||
self._cfg.write(f)
|
self._cfg.write(f)
|
||||||
|
|
||||||
def __setattr__(self, name, value):
|
def __setattr__(self, name, value):
|
||||||
|
@ -21,7 +21,7 @@ class DataBroker(object):
|
|||||||
metadata_raw = self.filebroker.pull_metafile(citekey)
|
metadata_raw = self.filebroker.pull_metafile(citekey)
|
||||||
return self.endecoder.decode_metadata(metadata_raw)
|
return self.endecoder.decode_metadata(metadata_raw)
|
||||||
|
|
||||||
def pull_bibdata(self, citekey):
|
def pull_bibentry(self, citekey):
|
||||||
bibdata_raw = self.filebroker.pull_bibfile(citekey)
|
bibdata_raw = self.filebroker.pull_bibfile(citekey)
|
||||||
return self.endecoder.decode_bibdata(bibdata_raw)
|
return self.endecoder.decode_bibdata(bibdata_raw)
|
||||||
|
|
||||||
@ -29,7 +29,7 @@ class DataBroker(object):
|
|||||||
metadata_raw = self.endecoder.encode_metadata(metadata)
|
metadata_raw = self.endecoder.encode_metadata(metadata)
|
||||||
self.filebroker.push_metafile(citekey, metadata_raw)
|
self.filebroker.push_metafile(citekey, metadata_raw)
|
||||||
|
|
||||||
def push_bibdata(self, citekey, bibdata):
|
def push_bibentry(self, citekey, bibdata):
|
||||||
bibdata_raw = self.endecoder.encode_bibdata(bibdata)
|
bibdata_raw = self.endecoder.encode_bibdata(bibdata)
|
||||||
self.filebroker.push_bibfile(citekey, bibdata_raw)
|
self.filebroker.push_bibfile(citekey, bibdata_raw)
|
||||||
|
|
||||||
|
@ -31,14 +31,14 @@ class DataCache(object):
|
|||||||
def pull_metadata(self, citekey):
|
def pull_metadata(self, citekey):
|
||||||
return self.databroker.pull_metadata(citekey)
|
return self.databroker.pull_metadata(citekey)
|
||||||
|
|
||||||
def pull_bibdata(self, citekey):
|
def pull_bibentry(self, citekey):
|
||||||
return self.databroker.pull_bibdata(citekey)
|
return self.databroker.pull_bibentry(citekey)
|
||||||
|
|
||||||
def push_metadata(self, citekey, metadata):
|
def push_metadata(self, citekey, metadata):
|
||||||
self.databroker.push_metadata(citekey, metadata)
|
self.databroker.push_metadata(citekey, metadata)
|
||||||
|
|
||||||
def push_bibdata(self, citekey, bibdata):
|
def push_bibentry(self, citekey, bibdata):
|
||||||
self.databroker.push_bibdata(citekey, bibdata)
|
self.databroker.push_bibentry(citekey, bibdata)
|
||||||
|
|
||||||
def push(self, citekey, metadata, bibdata):
|
def push(self, citekey, metadata, bibdata):
|
||||||
self.databroker.push(citekey, metadata, bibdata)
|
self.databroker.push(citekey, metadata, bibdata)
|
||||||
|
@ -28,19 +28,11 @@ class Paper(object):
|
|||||||
in a pythonic manner.
|
in a pythonic manner.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, bibdata, citekey=None, metadata=None):
|
def __init__(self, citekey, bibdata, metadata=None):
|
||||||
self.citekey = citekey
|
self.citekey = citekey
|
||||||
self.metadata = _clean_metadata(metadata)
|
self.metadata = _clean_metadata(metadata)
|
||||||
self.bibdata = bibdata
|
self.bibdata = bibdata
|
||||||
|
bibstruct.check_citekey(self.citekey)
|
||||||
_, self.bibentry = bibstruct.get_entry(self.bibdata)
|
|
||||||
|
|
||||||
if self.citekey is None:
|
|
||||||
self.citekey = bibstruct.extract_citekey(self.bibdata)
|
|
||||||
bibstruct.check_citekey(self.citekey)
|
|
||||||
else:
|
|
||||||
def_citekey = bibstruct.extract_citekey(self.bibdata)
|
|
||||||
self.bibdata = {citekey: self.bibdata[def_citekey]}
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
return (isinstance(self, Paper) and type(other) is type(self)
|
return (isinstance(self, Paper) and type(other) is type(self)
|
||||||
@ -50,15 +42,15 @@ class Paper(object):
|
|||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return 'Paper(%s, %s, %s)' % (
|
return 'Paper(%s, %s, %s)' % (
|
||||||
self.citekey, self.bibentry, self.metadata)
|
self.citekey, self.bibdata, self.metadata)
|
||||||
|
|
||||||
def __deepcopy__(self, memo):
|
def __deepcopy__(self, memo):
|
||||||
return Paper(citekey =self.citekey,
|
return Paper(citekey=self.citekey,
|
||||||
metadata=copy.deepcopy(self.metadata, memo),
|
metadata=copy.deepcopy(self.metadata, memo),
|
||||||
bibdata=copy.deepcopy(self.bibdata, memo))
|
bibdata=copy.deepcopy(self.bibdata, memo))
|
||||||
|
|
||||||
def __copy__(self):
|
def __copy__(self):
|
||||||
return Paper(citekey =self.citekey,
|
return Paper(citekey=self.citekey,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
bibdata=self.bibdata)
|
bibdata=self.bibdata)
|
||||||
|
|
||||||
@ -67,6 +59,10 @@ class Paper(object):
|
|||||||
|
|
||||||
# docpath
|
# docpath
|
||||||
|
|
||||||
|
@property
|
||||||
|
def bibentry(self):
|
||||||
|
return {self.citekey: self.bibdata}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def docpath(self):
|
def docpath(self):
|
||||||
return self.metadata.get('docfile', '')
|
return self.metadata.get('docfile', '')
|
||||||
@ -105,3 +101,10 @@ class Paper(object):
|
|||||||
@added.setter
|
@added.setter
|
||||||
def added(self, value):
|
def added(self, value):
|
||||||
self.metadata['added'] = value
|
self.metadata['added'] = value
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_bibentry(bibentry, citekey=None, metadata=None):
|
||||||
|
bibentry_key, bibdata = bibstruct.get_entry(bibentry)
|
||||||
|
if citekey is None:
|
||||||
|
citekey = bibentry_key
|
||||||
|
return Paper(citekey, bibdata, metadata=metadata)
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
# display formatting
|
# display formatting
|
||||||
|
|
||||||
from . import color
|
from . import color
|
||||||
|
from .bibstruct import TYPE_KEY
|
||||||
|
|
||||||
|
|
||||||
# should be adaptated to bibtexparser dicts
|
# should be adaptated to bibtexparser dicts
|
||||||
@ -12,9 +13,9 @@ def person_repr(p):
|
|||||||
' '.join(p.lineage(abbr=True))] if s)
|
' '.join(p.lineage(abbr=True))] if s)
|
||||||
|
|
||||||
|
|
||||||
def short_authors(bibentry):
|
def short_authors(bibdata):
|
||||||
try:
|
try:
|
||||||
authors = [p for p in bibentry['author']]
|
authors = [p for p in bibdata['author']]
|
||||||
if len(authors) < 3:
|
if len(authors) < 3:
|
||||||
return ' and '.join(authors)
|
return ' and '.join(authors)
|
||||||
else:
|
else:
|
||||||
@ -23,19 +24,19 @@ def short_authors(bibentry):
|
|||||||
return ''
|
return ''
|
||||||
|
|
||||||
|
|
||||||
def bib_oneliner(bibentry):
|
def bib_oneliner(bibdata):
|
||||||
authors = short_authors(bibentry)
|
authors = short_authors(bibdata)
|
||||||
journal = ''
|
journal = ''
|
||||||
if 'journal' in bibentry:
|
if 'journal' in bibdata:
|
||||||
journal = ' ' + bibentry['journal']['name']
|
journal = ' ' + bibdata['journal']['name']
|
||||||
elif bibentry['type'] == 'inproceedings':
|
elif bibdata[TYPE_KEY] == 'inproceedings':
|
||||||
journal = ' ' + bibentry.get('booktitle', '')
|
journal = ' ' + bibdata.get('booktitle', '')
|
||||||
|
|
||||||
return u'{authors} \"{title}\"{journal}{year}'.format(
|
return u'{authors} \"{title}\"{journal}{year}'.format(
|
||||||
authors=color.dye(authors, color.grey, bold=True),
|
authors=color.dye(authors, color.grey, bold=True),
|
||||||
title=bibentry.get('title', ''),
|
title=bibdata.get('title', ''),
|
||||||
journal=color.dye(journal, color.yellow),
|
journal=color.dye(journal, color.yellow),
|
||||||
year=' ({})'.format(bibentry['year']) if 'year' in bibentry else '',
|
year=' ({})'.format(bibdata['year']) if 'year' in bibdata else '',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -48,11 +49,11 @@ def bib_desc(bib_data):
|
|||||||
return s
|
return s
|
||||||
|
|
||||||
|
|
||||||
def paper_oneliner(p, citekey_only = False):
|
def paper_oneliner(p, citekey_only=False):
|
||||||
if citekey_only:
|
if citekey_only:
|
||||||
return p.citekey
|
return p.citekey
|
||||||
else:
|
else:
|
||||||
bibdesc = bib_oneliner(p.bibentry)
|
bibdesc = bib_oneliner(p.bibdata)
|
||||||
tags = '' if len(p.tags) == 0 else '| {}'.format(
|
tags = '' if len(p.tags) == 0 else '| {}'.format(
|
||||||
','.join(color.dye(t, color.tag) for t in sorted(p.tags)))
|
','.join(color.dye(t, color.tag) for t in sorted(p.tags)))
|
||||||
return u'[{citekey}] {descr} {tags}'.format(
|
return u'[{citekey}] {descr} {tags}'.format(
|
||||||
|
@ -58,9 +58,10 @@ class Repository(object):
|
|||||||
def pull_paper(self, citekey):
|
def pull_paper(self, citekey):
|
||||||
"""Load a paper by its citekey from disk, if necessary."""
|
"""Load a paper by its citekey from disk, if necessary."""
|
||||||
if citekey in self:
|
if citekey in self:
|
||||||
return Paper(self.databroker.pull_bibdata(citekey),
|
return Paper.from_bibentry(
|
||||||
citekey=citekey,
|
self.databroker.pull_bibentry(citekey),
|
||||||
metadata=self.databroker.pull_metadata(citekey))
|
citekey=citekey,
|
||||||
|
metadata=self.databroker.pull_metadata(citekey))
|
||||||
else:
|
else:
|
||||||
raise InvalidReference('{} citekey not found'.format(citekey))
|
raise InvalidReference('{} citekey not found'.format(citekey))
|
||||||
|
|
||||||
@ -75,7 +76,7 @@ class Repository(object):
|
|||||||
raise CiteKeyCollision('citekey {} already in use'.format(paper.citekey))
|
raise CiteKeyCollision('citekey {} already in use'.format(paper.citekey))
|
||||||
if not paper.added:
|
if not paper.added:
|
||||||
paper.added = datetime.now()
|
paper.added = datetime.now()
|
||||||
self.databroker.push_bibdata(paper.citekey, paper.bibdata)
|
self.databroker.push_bibentry(paper.citekey, paper.bibentry)
|
||||||
self.databroker.push_metadata(paper.citekey, paper.metadata)
|
self.databroker.push_metadata(paper.citekey, paper.metadata)
|
||||||
self.citekeys.add(paper.citekey)
|
self.citekeys.add(paper.citekey)
|
||||||
if event:
|
if event:
|
||||||
|
@ -202,6 +202,9 @@ class FakeInput():
|
|||||||
input() raises IndexError
|
input() raises IndexError
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
class UnexpectedInput(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
def __init__(self, inputs, module_list=tuple()):
|
def __init__(self, inputs, module_list=tuple()):
|
||||||
self.inputs = list(inputs) or []
|
self.inputs = list(inputs) or []
|
||||||
self.module_list = module_list
|
self.module_list = module_list
|
||||||
@ -218,9 +221,12 @@ class FakeInput():
|
|||||||
self.inputs.append(inp)
|
self.inputs.append(inp)
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
def __call__(self, *args, **kwargs):
|
||||||
inp = self.inputs[self._cursor]
|
try:
|
||||||
self._cursor += 1
|
inp = self.inputs[self._cursor]
|
||||||
return inp
|
self._cursor += 1
|
||||||
|
return inp
|
||||||
|
except IndexError:
|
||||||
|
raise self.UnexpectedInput('Unexpected user input in test.')
|
||||||
|
|
||||||
|
|
||||||
class TestFakeFs(unittest.TestCase):
|
class TestFakeFs(unittest.TestCase):
|
||||||
|
@ -23,18 +23,18 @@ doe_bib = """
|
|||||||
|
|
||||||
dummy_metadata = {'docfile': 'docsdir://hop.la', 'tags': set(['a', 'b'])}
|
dummy_metadata = {'docfile': 'docsdir://hop.la', 'tags': set(['a', 'b'])}
|
||||||
|
|
||||||
franny_bibdata = coder.decode_bibdata(franny_bib)
|
franny_bibentry = coder.decode_bibdata(franny_bib)
|
||||||
franny_bibentry = franny_bibdata['Franny1961']
|
franny_bibdata = franny_bibentry['Franny1961']
|
||||||
|
|
||||||
doe_bibdata = coder.decode_bibdata(doe_bib)
|
doe_bibentry = coder.decode_bibdata(doe_bib)
|
||||||
doe_bibentry = doe_bibdata['Doe2013']
|
doe_bibdata = doe_bibentry['Doe2013']
|
||||||
|
|
||||||
turing_bibdata = coder.decode_bibdata(str_fixtures.turing_bib)
|
turing_bibentry = coder.decode_bibdata(str_fixtures.turing_bib)
|
||||||
turing_bibentry = turing_bibdata['turing1950computing']
|
turing_bibdata = turing_bibentry['turing1950computing']
|
||||||
turing_metadata = coder.decode_metadata(str_fixtures.turing_meta)
|
turing_metadata = coder.decode_metadata(str_fixtures.turing_meta)
|
||||||
|
|
||||||
page_bibdata = coder.decode_bibdata(str_fixtures.bibtex_raw0)
|
page_bibentry = coder.decode_bibdata(str_fixtures.bibtex_raw0)
|
||||||
page_bibentry = page_bibdata['Page99']
|
page_bibdata = page_bibentry['Page99']
|
||||||
page_metadata = coder.decode_metadata(str_fixtures.metadata_raw0)
|
page_metadata = coder.decode_metadata(str_fixtures.metadata_raw0)
|
||||||
|
|
||||||
page_metadata = coder.decode_metadata(str_fixtures.metadata_raw0)
|
page_metadata = coder.decode_metadata(str_fixtures.metadata_raw0)
|
||||||
|
@ -16,18 +16,19 @@ class TestGenerateCitekey(unittest.TestCase):
|
|||||||
bibstruct.generate_citekey(None)
|
bibstruct.generate_citekey(None)
|
||||||
|
|
||||||
def test_escapes_chars(self):
|
def test_escapes_chars(self):
|
||||||
doe_bibdata = copy.deepcopy(fixtures.doe_bibdata)
|
doe_bibentry = copy.deepcopy(fixtures.doe_bibentry)
|
||||||
citekey, entry = bibstruct.get_entry(doe_bibdata)
|
citekey, bibdata = bibstruct.get_entry(doe_bibentry)
|
||||||
entry['author'] = [u'Zôu\\@/ , John']
|
bibdata['author'] = [u'Zôu\\@/ , John']
|
||||||
key = bibstruct.generate_citekey(doe_bibdata)
|
key = bibstruct.generate_citekey(doe_bibentry)
|
||||||
|
self.assertEqual(key, 'Zou2013')
|
||||||
|
|
||||||
def test_simple(self):
|
def test_simple(self):
|
||||||
bibdata = copy.deepcopy(fixtures.doe_bibdata)
|
bibentry = copy.deepcopy(fixtures.doe_bibentry)
|
||||||
key = bibstruct.generate_citekey(bibdata)
|
key = bibstruct.generate_citekey(bibentry)
|
||||||
self.assertEqual(key, 'Doe2013')
|
self.assertEqual(key, 'Doe2013')
|
||||||
|
|
||||||
bibdata = copy.deepcopy(fixtures.franny_bibdata)
|
bibentry = copy.deepcopy(fixtures.franny_bibentry)
|
||||||
key = bibstruct.generate_citekey(bibdata)
|
key = bibstruct.generate_citekey(bibentry)
|
||||||
self.assertEqual(key, 'Salinger1961')
|
self.assertEqual(key, 'Salinger1961')
|
||||||
|
|
||||||
|
|
||||||
|
@ -17,7 +17,7 @@ class TestDataBroker(unittest.TestCase):
|
|||||||
|
|
||||||
ende = endecoder.EnDecoder()
|
ende = endecoder.EnDecoder()
|
||||||
page99_metadata = ende.decode_metadata(str_fixtures.metadata_raw0)
|
page99_metadata = ende.decode_metadata(str_fixtures.metadata_raw0)
|
||||||
page99_bibdata = ende.decode_bibdata(str_fixtures.bibtex_raw0)
|
page99_bibentry = ende.decode_bibdata(str_fixtures.bibtex_raw0)
|
||||||
|
|
||||||
for db_class in [databroker.DataBroker, datacache.DataCache]:
|
for db_class in [databroker.DataBroker, datacache.DataCache]:
|
||||||
self.fs = fake_env.create_fake_fs([content, filebroker, configs])
|
self.fs = fake_env.create_fake_fs([content, filebroker, configs])
|
||||||
@ -28,22 +28,22 @@ class TestDataBroker(unittest.TestCase):
|
|||||||
self.assertFalse(db.exists('citekey1', meta_check=True))
|
self.assertFalse(db.exists('citekey1', meta_check=True))
|
||||||
self.assertFalse(db.exists('citekey1', meta_check=False))
|
self.assertFalse(db.exists('citekey1', meta_check=False))
|
||||||
|
|
||||||
db.push_bibdata('citekey1', page99_bibdata)
|
db.push_bibentry('citekey1', page99_bibentry)
|
||||||
self.assertTrue(db.exists('citekey1', meta_check=False))
|
self.assertTrue(db.exists('citekey1', meta_check=False))
|
||||||
self.assertTrue(db.exists('citekey1', meta_check=True))
|
self.assertTrue(db.exists('citekey1', meta_check=True))
|
||||||
|
|
||||||
self.assertEqual(db.pull_metadata('citekey1'), page99_metadata)
|
self.assertEqual(db.pull_metadata('citekey1'), page99_metadata)
|
||||||
pulled = db.pull_bibdata('citekey1')['Page99']
|
pulled = db.pull_bibentry('citekey1')['Page99']
|
||||||
for key, value in pulled.items():
|
for key, value in pulled.items():
|
||||||
self.assertEqual(pulled[key], page99_bibdata['Page99'][key])
|
self.assertEqual(pulled[key], page99_bibentry['Page99'][key])
|
||||||
self.assertEqual(db.pull_bibdata('citekey1'), page99_bibdata)
|
self.assertEqual(db.pull_bibentry('citekey1'), page99_bibentry)
|
||||||
|
|
||||||
fake_env.unset_fake_fs([content, filebroker])
|
fake_env.unset_fake_fs([content, filebroker])
|
||||||
|
|
||||||
def test_existing_data(self):
|
def test_existing_data(self):
|
||||||
|
|
||||||
ende = endecoder.EnDecoder()
|
ende = endecoder.EnDecoder()
|
||||||
page99_bibdata = ende.decode_bibdata(str_fixtures.bibtex_raw0)
|
page99_bibentry = ende.decode_bibdata(str_fixtures.bibtex_raw0)
|
||||||
|
|
||||||
for db_class in [databroker.DataBroker, datacache.DataCache]:
|
for db_class in [databroker.DataBroker, datacache.DataCache]:
|
||||||
self.fs = fake_env.create_fake_fs([content, filebroker])
|
self.fs = fake_env.create_fake_fs([content, filebroker])
|
||||||
@ -51,16 +51,16 @@ class TestDataBroker(unittest.TestCase):
|
|||||||
|
|
||||||
db = db_class('repo', create=False)
|
db = db_class('repo', create=False)
|
||||||
|
|
||||||
self.assertEqual(db.pull_bibdata('Page99'), page99_bibdata)
|
self.assertEqual(db.pull_bibentry('Page99'), page99_bibentry)
|
||||||
|
|
||||||
for citekey in ['10.1371_journal.pone.0038236',
|
for citekey in ['10.1371_journal.pone.0038236',
|
||||||
'10.1371journal.pone.0063400',
|
'10.1371journal.pone.0063400',
|
||||||
'journal0063400']:
|
'journal0063400']:
|
||||||
db.pull_bibdata(citekey)
|
db.pull_bibentry(citekey)
|
||||||
db.pull_metadata(citekey)
|
db.pull_metadata(citekey)
|
||||||
|
|
||||||
with self.assertRaises(IOError):
|
with self.assertRaises(IOError):
|
||||||
db.pull_bibdata('citekey')
|
db.pull_bibentry('citekey')
|
||||||
with self.assertRaises(IOError):
|
with self.assertRaises(IOError):
|
||||||
db.pull_metadata('citekey')
|
db.pull_metadata('citekey')
|
||||||
|
|
||||||
|
@ -86,9 +86,7 @@ class TestEnDecode(unittest.TestCase):
|
|||||||
biblines = turing_bib.splitlines()
|
biblines = turing_bib.splitlines()
|
||||||
biblines.insert(-3, keyword_str)
|
biblines.insert(-3, keyword_str)
|
||||||
bibsrc = '\n'.join(biblines)
|
bibsrc = '\n'.join(biblines)
|
||||||
print(bibsrc)
|
|
||||||
entry = decoder.decode_bibdata(bibsrc)['turing1950computing']
|
entry = decoder.decode_bibdata(bibsrc)['turing1950computing']
|
||||||
print(entry)
|
|
||||||
self.assertNotIn(u'keywords', entry)
|
self.assertNotIn(u'keywords', entry)
|
||||||
self.assertIn(u'keyword', entry)
|
self.assertIn(u'keyword', entry)
|
||||||
self.assertEqual(set(keywords), set(entry[u'keyword']))
|
self.assertEqual(set(keywords), set(entry[u'keyword']))
|
||||||
|
@ -75,19 +75,19 @@ class TestEvents(unittest.TestCase):
|
|||||||
correct = ['abcdefghijklmnopqrstuvwxyz 12 15',
|
correct = ['abcdefghijklmnopqrstuvwxyz 12 15',
|
||||||
'Helloword',
|
'Helloword',
|
||||||
'one']
|
'one']
|
||||||
self.assertEquals(_output, correct)
|
self.assertEqual(_output, correct)
|
||||||
|
|
||||||
def test_listen_AddEvent(self):
|
def test_listen_AddEvent(self):
|
||||||
addevent = AddEvent()
|
addevent = AddEvent()
|
||||||
addevent.send()
|
addevent.send()
|
||||||
correct = [42]
|
correct = [42]
|
||||||
self.assertEquals(_output, correct)
|
self.assertEqual(_output, correct)
|
||||||
|
|
||||||
def test_listen_Info(self):
|
def test_listen_Info(self):
|
||||||
Info('info').send()
|
Info('info').send()
|
||||||
SpecificInfo('info', 'specific').send()
|
SpecificInfo('info', 'specific').send()
|
||||||
correct = ['info', 'info', 'specific']
|
correct = ['info', 'info', 'specific']
|
||||||
self.assertEquals(_output, correct)
|
self.assertEqual(_output, correct)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import os
|
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import dotdot
|
import dotdot
|
||||||
@ -9,38 +9,38 @@ from pubs.paper import Paper
|
|||||||
|
|
||||||
class TestAttributes(unittest.TestCase):
|
class TestAttributes(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.p = Paper.from_bibentry(
|
||||||
|
fixtures.page_bibentry,
|
||||||
|
metadata=fixtures.page_metadata).deepcopy()
|
||||||
|
|
||||||
def test_tags(self):
|
def test_tags(self):
|
||||||
p = Paper(fixtures.page_bibdata, metadata=fixtures.page_metadata).deepcopy()
|
self.assertEqual(self.p.tags, set(['search', 'network']))
|
||||||
self.assertEqual(p.tags, set(['search', 'network']))
|
|
||||||
|
|
||||||
def test_add_tag(self):
|
def test_add_tag(self):
|
||||||
p = Paper(fixtures.page_bibdata, metadata=fixtures.page_metadata).deepcopy()
|
self.p.add_tag('algorithm')
|
||||||
p.add_tag('algorithm')
|
self.assertEqual(self.p.tags, set(['search', 'network', 'algorithm']))
|
||||||
self.assertEqual(p.tags, set(['search', 'network', 'algorithm']))
|
self.p.add_tag('algorithm')
|
||||||
p.add_tag('algorithm')
|
self.assertEqual(self.p.tags, set(['search', 'network', 'algorithm']))
|
||||||
self.assertEqual(p.tags, set(['search', 'network', 'algorithm']))
|
|
||||||
|
|
||||||
def test_set_tags(self):
|
def test_set_tags(self):
|
||||||
p = Paper(fixtures.page_bibdata, metadata=fixtures.page_metadata).deepcopy()
|
self.p.tags = ['algorithm']
|
||||||
p.tags = ['algorithm']
|
self.assertEqual(self.p.tags, set(['algorithm']))
|
||||||
self.assertEqual(p.tags, set(['algorithm']))
|
|
||||||
|
|
||||||
def test_remove_tags(self):
|
def test_remove_tags(self):
|
||||||
p = Paper(fixtures.page_bibdata, metadata=fixtures.page_metadata).deepcopy()
|
self.p.remove_tag('network')
|
||||||
p.remove_tag('network')
|
self.assertEqual(self.p.tags, set(['search']))
|
||||||
self.assertEqual(p.tags, set(['search']))
|
|
||||||
|
|
||||||
def test_mixed_tags(self):
|
def test_mixed_tags(self):
|
||||||
p = Paper(fixtures.page_bibdata, metadata=fixtures.page_metadata).deepcopy()
|
self.p.add_tag('algorithm')
|
||||||
p.add_tag('algorithm')
|
self.assertEqual(self.p.tags, set(['search', 'network', 'algorithm']))
|
||||||
self.assertEqual(p.tags, set(['search', 'network', 'algorithm']))
|
self.p.remove_tag('network')
|
||||||
p.remove_tag('network')
|
self.assertEqual(self.p.tags, set(['search', 'algorithm']))
|
||||||
self.assertEqual(p.tags, set(['search', 'algorithm']))
|
self.p.tags = ['ranking']
|
||||||
p.tags = ['ranking']
|
self.assertEqual(self.p.tags, set(['ranking']))
|
||||||
self.assertEqual(p.tags, set(['ranking']))
|
self.p.remove_tag('ranking')
|
||||||
p.remove_tag('ranking')
|
self.assertEqual(self.p.tags, set())
|
||||||
self.assertEqual(p.tags, set())
|
self.p.remove_tag('ranking')
|
||||||
p.remove_tag('ranking')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@ -2,18 +2,19 @@ import unittest
|
|||||||
|
|
||||||
import dotdot
|
import dotdot
|
||||||
from pubs.commands.list_cmd import (_check_author_match,
|
from pubs.commands.list_cmd import (_check_author_match,
|
||||||
_check_field_match,
|
_check_field_match,
|
||||||
_check_query_block,
|
_check_query_block,
|
||||||
filter_paper,
|
filter_paper,
|
||||||
InvalidQuery)
|
InvalidQuery)
|
||||||
|
|
||||||
from pubs.paper import Paper
|
from pubs.paper import Paper
|
||||||
|
|
||||||
import fixtures
|
import fixtures
|
||||||
|
|
||||||
doe_paper = Paper(fixtures.doe_bibdata)
|
doe_paper = Paper.from_bibentry(fixtures.doe_bibentry)
|
||||||
page_paper = Paper(fixtures.page_bibdata)
|
page_paper = Paper.from_bibentry(fixtures.page_bibentry)
|
||||||
turing_paper = Paper(fixtures.turing_bibdata, metadata=fixtures.turing_metadata)
|
turing_paper = Paper.from_bibentry(fixtures.turing_bibentry,
|
||||||
|
metadata=fixtures.turing_metadata)
|
||||||
|
|
||||||
class TestAuthorFilter(unittest.TestCase):
|
class TestAuthorFilter(unittest.TestCase):
|
||||||
|
|
||||||
|
@ -15,7 +15,7 @@ class TestRepo(fake_env.TestFakeFs):
|
|||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(TestRepo, self).setUp()
|
super(TestRepo, self).setUp()
|
||||||
self.repo = Repository(configs.Config(), create=True)
|
self.repo = Repository(configs.Config(), create=True)
|
||||||
self.repo.push_paper(Paper(fixtures.turing_bibdata))
|
self.repo.push_paper(Paper.from_bibentry(fixtures.turing_bibentry))
|
||||||
|
|
||||||
|
|
||||||
class TestCitekeyGeneration(TestRepo):
|
class TestCitekeyGeneration(TestRepo):
|
||||||
@ -27,9 +27,10 @@ class TestCitekeyGeneration(TestRepo):
|
|||||||
self.assertEqual(_base27(26 + i + 1), 'a' + chr(97 + i))
|
self.assertEqual(_base27(26 + i + 1), 'a' + chr(97 + i))
|
||||||
|
|
||||||
def test_generated_key_is_unique(self):
|
def test_generated_key_is_unique(self):
|
||||||
self.repo.push_paper(Paper(fixtures.doe_bibdata))
|
self.repo.push_paper(Paper.from_bibentry(fixtures.doe_bibentry))
|
||||||
c = self.repo.unique_citekey('Doe2013')
|
c = self.repo.unique_citekey('Doe2013')
|
||||||
self.repo.push_paper(Paper(fixtures.doe_bibdata, citekey='Doe2013a'))
|
self.repo.push_paper(Paper.from_bibentry(fixtures.doe_bibentry,
|
||||||
|
citekey='Doe2013a'))
|
||||||
c = self.repo.unique_citekey('Doe2013')
|
c = self.repo.unique_citekey('Doe2013')
|
||||||
self.assertEqual(c, 'Doe2013b')
|
self.assertEqual(c, 'Doe2013b')
|
||||||
|
|
||||||
@ -38,25 +39,27 @@ class TestPushPaper(TestRepo):
|
|||||||
|
|
||||||
def test_raises_value_error_on_existing_key(self):
|
def test_raises_value_error_on_existing_key(self):
|
||||||
with self.assertRaises(CiteKeyCollision):
|
with self.assertRaises(CiteKeyCollision):
|
||||||
self.repo.push_paper(Paper(fixtures.turing_bibdata))
|
self.repo.push_paper(Paper.from_bibentry(fixtures.turing_bibentry))
|
||||||
|
|
||||||
def test_pushes_paper_bibdata(self):
|
def test_pushes_paper_bibdata(self):
|
||||||
orig = fixtures.doe_bibdata
|
orig = fixtures.doe_bibentry
|
||||||
self.repo.push_paper(Paper(orig))
|
self.repo.push_paper(Paper.from_bibentry(orig))
|
||||||
retrieved = self.repo.databroker.pull_bibdata('Doe2013')
|
retrieved = self.repo.databroker.pull_bibentry('Doe2013')
|
||||||
self.assertEquals(orig, retrieved)
|
self.assertEqual(orig, retrieved)
|
||||||
|
|
||||||
def test_pushes_paper_metadata(self):
|
def test_pushes_paper_metadata(self):
|
||||||
orig = {'docfile': 'dummy', 'tags': set(['tag', 'another']),
|
orig = {'docfile': 'dummy', 'tags': set(['tag', 'another']),
|
||||||
'added': datetime(2012, 12, 12, 12, 12, 12, 12)}
|
'added': datetime(2012, 12, 12, 12, 12, 12, 12)}
|
||||||
self.repo.push_paper(Paper(fixtures.doe_bibdata, metadata=orig))
|
self.repo.push_paper(Paper.from_bibentry(fixtures.doe_bibentry,
|
||||||
|
metadata=orig))
|
||||||
retrieved = self.repo.databroker.pull_metadata('Doe2013')
|
retrieved = self.repo.databroker.pull_metadata('Doe2013')
|
||||||
self.assertEquals(orig, retrieved)
|
self.assertEqual(orig, retrieved)
|
||||||
|
|
||||||
def test_pushes_paper_metadata_set_added(self):
|
def test_pushes_paper_metadata_set_added(self):
|
||||||
orig = {'docfile': 'dummy', 'tags': set(['tag', 'another'])}
|
orig = {'docfile': 'dummy', 'tags': set(['tag', 'another'])}
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
self.repo.push_paper(Paper(fixtures.doe_bibdata, metadata=orig))
|
self.repo.push_paper(Paper.from_bibentry(fixtures.doe_bibentry,
|
||||||
|
metadata=orig))
|
||||||
retrieved = self.repo.databroker.pull_metadata('Doe2013')
|
retrieved = self.repo.databroker.pull_metadata('Doe2013')
|
||||||
self.assertIn('added', retrieved)
|
self.assertIn('added', retrieved)
|
||||||
self.assertTrue(now < retrieved['added'])
|
self.assertTrue(now < retrieved['added'])
|
||||||
|
@ -29,7 +29,7 @@ class TestFakeInput(unittest.TestCase):
|
|||||||
input = fake_env.FakeInput(['yes', 'no'])
|
input = fake_env.FakeInput(['yes', 'no'])
|
||||||
self.assertEqual(input(), 'yes')
|
self.assertEqual(input(), 'yes')
|
||||||
self.assertEqual(input(), 'no')
|
self.assertEqual(input(), 'no')
|
||||||
with self.assertRaises(IndexError):
|
with self.assertRaises(fake_env.FakeInput.UnexpectedInput):
|
||||||
input()
|
input()
|
||||||
|
|
||||||
def test_input2(self):
|
def test_input2(self):
|
||||||
@ -37,7 +37,7 @@ class TestFakeInput(unittest.TestCase):
|
|||||||
other_input.as_global()
|
other_input.as_global()
|
||||||
self.assertEqual(color.input(), 'yes')
|
self.assertEqual(color.input(), 'yes')
|
||||||
self.assertEqual(color.input(), 'no')
|
self.assertEqual(color.input(), 'no')
|
||||||
with self.assertRaises(IndexError):
|
with self.assertRaises(fake_env.FakeInput.UnexpectedInput):
|
||||||
color.input()
|
color.input()
|
||||||
|
|
||||||
def test_editor_input(self):
|
def test_editor_input(self):
|
||||||
@ -46,7 +46,7 @@ class TestFakeInput(unittest.TestCase):
|
|||||||
other_input.as_global()
|
other_input.as_global()
|
||||||
self.assertEqual(content.editor_input(), 'yes')
|
self.assertEqual(content.editor_input(), 'yes')
|
||||||
self.assertEqual(content.editor_input(), 'no')
|
self.assertEqual(content.editor_input(), 'no')
|
||||||
with self.assertRaises(IndexError):
|
with self.assertRaises(fake_env.FakeInput.UnexpectedInput):
|
||||||
color.input()
|
color.input()
|
||||||
|
|
||||||
|
|
||||||
@ -66,35 +66,41 @@ class CommandTestCase(unittest.TestCase):
|
|||||||
In the latter case, the command is :
|
In the latter case, the command is :
|
||||||
1. a string reprensenting the command to execute
|
1. a string reprensenting the command to execute
|
||||||
2. the user inputs to feed to the command during execution
|
2. the user inputs to feed to the command during execution
|
||||||
3. the output expected, verified with assertEqual
|
3. the output expected, verified with assertEqual. Always captures
|
||||||
|
output in this case.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
outs = []
|
outs = []
|
||||||
for cmd in cmds:
|
for cmd in cmds:
|
||||||
|
inputs = []
|
||||||
|
output = None
|
||||||
|
actual_cmd = cmd
|
||||||
|
current_capture_output = capture_output
|
||||||
if not isinstance(cmd, p3.ustr):
|
if not isinstance(cmd, p3.ustr):
|
||||||
if len(cmd) == 2:
|
actual_cmd = cmd[0]
|
||||||
input = fake_env.FakeInput(cmd[1], [content, uis, p3])
|
if len(cmd) == 2: # Inputs provided
|
||||||
input.as_global()
|
inputs = cmd[1]
|
||||||
|
if len(cmd) == 3: # Expected output provided
|
||||||
if capture_output:
|
current_capture_output = True
|
||||||
_, stdout, stderr = fake_env.redirect(pubs_cmd.execute)(cmd[0].split())
|
output = cmd[2]
|
||||||
if len(cmd) == 3 and capture_output:
|
# Always set fake input: test should not ask unexpected user input
|
||||||
actual_out = color.undye(stdout)
|
input = fake_env.FakeInput(inputs, [content, uis, p3])
|
||||||
correct_out = color.undye(cmd[2])
|
input.as_global()
|
||||||
|
try:
|
||||||
|
if current_capture_output:
|
||||||
|
_, stdout, stderr = fake_env.redirect(pubs_cmd.execute)(
|
||||||
|
actual_cmd.split())
|
||||||
|
self.assertEqual(stderr, '')
|
||||||
|
actual_out = color.undye(stdout)
|
||||||
|
if output is not None:
|
||||||
|
correct_out = color.undye(output)
|
||||||
self.assertEqual(actual_out, correct_out)
|
self.assertEqual(actual_out, correct_out)
|
||||||
|
outs.append(color.undye(actual_out))
|
||||||
else:
|
else:
|
||||||
pubs_cmd.execute(cmd.split())
|
pubs_cmd.execute(cmd.split())
|
||||||
|
except fake_env.FakeInput.UnexpectedInput:
|
||||||
else:
|
self.fail('Unexpected input asked by command: {}.'.format(
|
||||||
if capture_output:
|
actual_cmd))
|
||||||
assert p3.isbasestr(cmd)
|
|
||||||
_, stdout, stderr = fake_env.redirect(pubs_cmd.execute)(cmd.split())
|
|
||||||
else:
|
|
||||||
pubs_cmd.execute(cmd.split())
|
|
||||||
|
|
||||||
if capture_output:
|
|
||||||
assert(stderr == '')
|
|
||||||
outs.append(color.undye(stdout))
|
|
||||||
if PRINT_OUTPUT:
|
if PRINT_OUTPUT:
|
||||||
print(outs)
|
print(outs)
|
||||||
return outs
|
return outs
|
||||||
@ -161,7 +167,7 @@ class TestAdd(DataCommandTestCase):
|
|||||||
|
|
||||||
def test_add_doc_nocopy_does_not_copy(self):
|
def test_add_doc_nocopy_does_not_copy(self):
|
||||||
cmds = ['pubs init',
|
cmds = ['pubs init',
|
||||||
'pubs add /data/pagerank.bib -C -d /data/pagerank.pdf',
|
'pubs add /data/pagerank.bib --link -d /data/pagerank.pdf',
|
||||||
]
|
]
|
||||||
self.execute_cmds(cmds)
|
self.execute_cmds(cmds)
|
||||||
self.assertEqual(self.fs['os'].listdir(
|
self.assertEqual(self.fs['os'].listdir(
|
||||||
@ -186,10 +192,8 @@ class TestList(DataCommandTestCase):
|
|||||||
'pubs list',
|
'pubs list',
|
||||||
]
|
]
|
||||||
outs = self.execute_cmds(cmds)
|
outs = self.execute_cmds(cmds)
|
||||||
print(outs[1].splitlines())
|
self.assertEqual(0, len(outs[1].splitlines()))
|
||||||
self.assertEquals(0, len(outs[1].splitlines()))
|
self.assertEqual(1, len(outs[3].splitlines()))
|
||||||
print(outs[3].splitlines())
|
|
||||||
self.assertEquals(1, len(outs[3].splitlines()))
|
|
||||||
|
|
||||||
def test_list_several_no_date(self):
|
def test_list_several_no_date(self):
|
||||||
self.execute_cmds(['pubs init -p /testrepo'])
|
self.execute_cmds(['pubs init -p /testrepo'])
|
||||||
@ -203,14 +207,11 @@ class TestList(DataCommandTestCase):
|
|||||||
'pubs list',
|
'pubs list',
|
||||||
]
|
]
|
||||||
outs = self.execute_cmds(cmds)
|
outs = self.execute_cmds(cmds)
|
||||||
print(outs[0].splitlines())
|
self.assertEqual(4, len(outs[0].splitlines()))
|
||||||
self.assertEquals(4, len(outs[0].splitlines()))
|
self.assertEqual(3, len(outs[2].splitlines()))
|
||||||
print(outs[2].splitlines())
|
self.assertEqual(4, len(outs[4].splitlines()))
|
||||||
self.assertEquals(3, len(outs[2].splitlines()))
|
|
||||||
print(outs[4].splitlines())
|
|
||||||
self.assertEquals(4, len(outs[4].splitlines()))
|
|
||||||
# Last added should be last
|
# Last added should be last
|
||||||
self.assertEquals('[Page99]', outs[4].splitlines()[-1][:8])
|
self.assertEqual('[Page99]', outs[4].splitlines()[-1][:8])
|
||||||
|
|
||||||
def test_list_smart_case(self):
|
def test_list_smart_case(self):
|
||||||
cmds = ['pubs init',
|
cmds = ['pubs init',
|
||||||
@ -219,8 +220,7 @@ class TestList(DataCommandTestCase):
|
|||||||
'pubs list title:language author:Saunders',
|
'pubs list title:language author:Saunders',
|
||||||
]
|
]
|
||||||
outs = self.execute_cmds(cmds)
|
outs = self.execute_cmds(cmds)
|
||||||
print(outs[-1])
|
self.assertEqual(1, len(outs[-1].splitlines()))
|
||||||
self.assertEquals(1, len(outs[-1].splitlines()))
|
|
||||||
|
|
||||||
def test_list_ignore_case(self):
|
def test_list_ignore_case(self):
|
||||||
cmds = ['pubs init',
|
cmds = ['pubs init',
|
||||||
@ -229,8 +229,7 @@ class TestList(DataCommandTestCase):
|
|||||||
'pubs list --ignore-case title:lAnguAge author:saunders',
|
'pubs list --ignore-case title:lAnguAge author:saunders',
|
||||||
]
|
]
|
||||||
outs = self.execute_cmds(cmds)
|
outs = self.execute_cmds(cmds)
|
||||||
print(outs[-1])
|
self.assertEqual(1, len(outs[-1].splitlines()))
|
||||||
self.assertEquals(1, len(outs[-1].splitlines()))
|
|
||||||
|
|
||||||
def test_list_force_case(self):
|
def test_list_force_case(self):
|
||||||
cmds = ['pubs init',
|
cmds = ['pubs init',
|
||||||
@ -239,7 +238,7 @@ class TestList(DataCommandTestCase):
|
|||||||
'pubs list --force-case title:Language author:saunders',
|
'pubs list --force-case title:Language author:saunders',
|
||||||
]
|
]
|
||||||
outs = self.execute_cmds(cmds)
|
outs = self.execute_cmds(cmds)
|
||||||
self.assertEquals(0 + 1, len(outs[-1].split('\n')))
|
self.assertEqual(0 + 1, len(outs[-1].split('\n')))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ -247,12 +246,12 @@ class TestUsecase(DataCommandTestCase):
|
|||||||
|
|
||||||
def test_first(self):
|
def test_first(self):
|
||||||
correct = ['Initializing pubs in /paper_first\n',
|
correct = ['Initializing pubs in /paper_first\n',
|
||||||
'',
|
'[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) \nwas added to pubs.\n',
|
||||||
'[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) \n',
|
'[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) \n',
|
||||||
'\n',
|
'\n',
|
||||||
'',
|
'',
|
||||||
'network search\n',
|
'network search\n',
|
||||||
'[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) network search\n'
|
'[Page99] Page, Lawrence et al. "The PageRank Citation Ranking: Bringing Order to the Web." (1999) | network,search\n',
|
||||||
]
|
]
|
||||||
|
|
||||||
cmds = ['pubs init -p paper_first/',
|
cmds = ['pubs init -p paper_first/',
|
||||||
@ -264,7 +263,7 @@ class TestUsecase(DataCommandTestCase):
|
|||||||
'pubs tag search',
|
'pubs tag search',
|
||||||
]
|
]
|
||||||
|
|
||||||
self.assertEqual(correct, self.execute_cmds(cmds))
|
self.assertEqual(correct, self.execute_cmds(cmds, capture_output=True))
|
||||||
|
|
||||||
def test_second(self):
|
def test_second(self):
|
||||||
cmds = ['pubs init -p paper_second/',
|
cmds = ['pubs init -p paper_second/',
|
||||||
@ -290,7 +289,6 @@ class TestUsecase(DataCommandTestCase):
|
|||||||
]
|
]
|
||||||
self.execute_cmds(cmds)
|
self.execute_cmds(cmds)
|
||||||
docdir = self.fs['os'].path.expanduser('~/.pubs/doc/')
|
docdir = self.fs['os'].path.expanduser('~/.pubs/doc/')
|
||||||
print(self.fs['os'].listdir(docdir))
|
|
||||||
self.assertNotIn('turing-mind-1950.pdf', self.fs['os'].listdir(docdir))
|
self.assertNotIn('turing-mind-1950.pdf', self.fs['os'].listdir(docdir))
|
||||||
|
|
||||||
|
|
||||||
@ -364,7 +362,7 @@ class TestUsecase(DataCommandTestCase):
|
|||||||
]
|
]
|
||||||
outs = self.execute_cmds(cmds)
|
outs = self.execute_cmds(cmds)
|
||||||
self.assertEqual(endecoder.EnDecoder().decode_bibdata(outs[2]),
|
self.assertEqual(endecoder.EnDecoder().decode_bibdata(outs[2]),
|
||||||
fixtures.page_bibdata)
|
fixtures.page_bibentry)
|
||||||
|
|
||||||
def test_import(self):
|
def test_import(self):
|
||||||
cmds = ['pubs init',
|
cmds = ['pubs init',
|
||||||
|
Loading…
x
Reference in New Issue
Block a user