forked from GitHub/gf-core
Change the API for literals in Java and Python: the input sentence is no longer passed as a parameter to the callbacks.
This commit is contained in:
@@ -122,8 +122,9 @@ def getKLinearizations(grammar, tgtlanguage, abstractParsesList, K=10):
|
||||
kBestTrans.append( ((parseprob,), postprocessor(linstring)) );
|
||||
yield kBestTrans;
|
||||
|
||||
def getKBestParses(grammar, language, K, callbacks=[], serializable=False, sentid=count(1), max_length=50):
|
||||
def getKBestParses(grammar, language, K, serializable=False, sentid=count(1), max_length=50):
|
||||
parser = grammar.languages[language].parse;
|
||||
import translation_pipeline
|
||||
def worker(sentence):
|
||||
sentence = sentence.strip();
|
||||
curid = sentid.next();
|
||||
@@ -135,6 +136,7 @@ def getKBestParses(grammar, language, K, callbacks=[], serializable=False, senti
|
||||
print >>sys.stderr, '%d\t%.4f\t%s' %(curid, tend-tstart, err);
|
||||
return tend-tstart, kBestParses; # temporary hack to make sure parser does not get killed for very long sentences;
|
||||
try:
|
||||
callbacks = [('PN', translation_pipeline.parseNames(grammar, args.srclang, sentence)), ('Symb', translation_pipeline.parseUnknown(grammar, args.srclang, sentence))]
|
||||
for parseidx, parse in enumerate( parser(sentence, heuristics=0, callbacks=callbacks) ):
|
||||
parseScores[parse[0]] = True;
|
||||
kBestParses.append( (parse[0], str(parse[1]) if serializable else parse[1]) );
|
||||
@@ -160,8 +162,7 @@ def pgf_parse(args):
|
||||
preprocessor = lexer();
|
||||
inputSet = translation_pipeline.web_lexer(grammar, args.srclang, imap(preprocessor, args.inputstream) );
|
||||
outputPrinter = lambda X: "%f\t%s" %(X[0], str(X[1])); #operator.itemgetter(1);
|
||||
callbacks = [('PN', translation_pipeline.parseNames(grammar, args.srclang)), ('Symb', translation_pipeline.parseUnknown(grammar, args.srclang))];
|
||||
parser = getKBestParses(grammar, args.srclang, 1, callbacks);
|
||||
parser = getKBestParses(grammar, args.srclang, 1);
|
||||
|
||||
sentidx = 0;
|
||||
for time, parsesBlock in imap(parser, inputSet):
|
||||
@@ -176,8 +177,7 @@ def pgf_kparse(args):
|
||||
preprocessor = lexer();
|
||||
inputSet = translation_pipeline.web_lexer(grammar, args.srclang, imap(preprocessor, args.inputstream) );
|
||||
outputPrinter = printJohnsonRerankerFormat;
|
||||
callbacks = [('PN', translation_pipeline.parseNames(grammar, args.srclang)), ('Symb', translation_pipeline.parseUnknown(grammar, args.srclang))];
|
||||
parser = getKBestParses(grammar, args.srclang, args.K, callbacks=callbacks);
|
||||
parser = getKBestParses(grammar, args.srclang, args.K);
|
||||
|
||||
sentidx = 0;
|
||||
for time, parsesBlock in imap(parser, inputSet):
|
||||
|
||||
@@ -129,8 +129,8 @@ def clean_gfstrings(sentence):
|
||||
sentence = sentence.replace(entry, ' '.join(entry[1:-1].split('_')[:-1]) if entry.find('_') != -1 else '');
|
||||
return ' '.join( sentence.split() );
|
||||
|
||||
def parseNames(grammar, language):
|
||||
def callback(lin_idx, sentence, start):
|
||||
def parseNames(grammar, language, sentence):
|
||||
def callback(lin_idx, start):
|
||||
moving_start, end, eot = start, len(sentence), True;
|
||||
if moving_start < end and (not sentence[moving_start].isupper()):
|
||||
return None;
|
||||
@@ -175,8 +175,8 @@ def parseNames(grammar, language):
|
||||
return None;
|
||||
return callback;
|
||||
|
||||
def parseUnknown(grammar, language):
|
||||
def callback(lin_idx, sentence, start):
|
||||
def parseUnknown(grammar, language, sentence):
|
||||
def callback(lin_idx, start):
|
||||
moving_start, end, eot = start, len(sentence), True;
|
||||
isNewToken = (moving_start == 0) or (moving_start > 1 and sentence[moving_start-1].isspace()) # -- added to deal with segmentation errors like may => ma_N + Symb y
|
||||
if moving_start < end and (not sentence[moving_start].isupper()):
|
||||
@@ -271,7 +271,7 @@ def pipelineParsing(grammar, language, sentences, K=20):
|
||||
#buf = [sent for sent in sentences];
|
||||
buf, sentences = itertools.tee(sentences, 2);
|
||||
sentences = itertools.imap(gf_utils.lexer(lang=language), sentences);
|
||||
parser = gf_utils.getKBestParses(grammar, language, K, callbacks=[("PN", parseNames(grammar, language)), ("Symb", parseUnknown(grammar, language))]);
|
||||
parser = gf_utils.getKBestParses(grammar, language, K);
|
||||
for sent, (time, parsesBlock) in itertools.izip(buf, itertools.imap(parser, sentences)):
|
||||
yield (sent, parsesBlock);
|
||||
|
||||
|
||||
@@ -1280,11 +1280,11 @@ pypgf_literal_callback_match(PgfLiteralCallback* self, PgfConcr* concr,
|
||||
gu_container(self, PyPgfLiteralCallback, callback);
|
||||
|
||||
PyObject* result =
|
||||
PyObject_CallFunction(callback->pycallback, "isi",
|
||||
lin_idx, sentence, *poffset);
|
||||
PyObject_CallFunction(callback->pycallback, "ii",
|
||||
lin_idx, *poffset);
|
||||
if (result == NULL)
|
||||
return NULL;
|
||||
|
||||
|
||||
if (result == Py_None) {
|
||||
Py_DECREF(result);
|
||||
return NULL;
|
||||
|
||||
Reference in New Issue
Block a user